Add Jaguar Re-ID MegaDescriptor Space
Browse filesThis view is limited to 50 files because it contains too many changes. See raw diff
- .dockerignore +18 -0
- Dockerfile +56 -0
- README.md +45 -5
- __pycache__/demo.cpython-312.pyc +0 -0
- demo.py +65 -0
- vendor/hyperview/__init__.py +14 -0
- vendor/hyperview/__pycache__/__init__.cpython-312.pyc +0 -0
- vendor/hyperview/__pycache__/_version.cpython-312.pyc +0 -0
- vendor/hyperview/__pycache__/api.cpython-312.pyc +0 -0
- vendor/hyperview/_version.py +34 -0
- vendor/hyperview/api.py +408 -0
- vendor/hyperview/cli.py +362 -0
- vendor/hyperview/core/__init__.py +6 -0
- vendor/hyperview/core/__pycache__/__init__.cpython-312.pyc +0 -0
- vendor/hyperview/core/__pycache__/dataset.cpython-312.pyc +0 -0
- vendor/hyperview/core/__pycache__/sample.cpython-312.pyc +0 -0
- vendor/hyperview/core/__pycache__/selection.cpython-312.pyc +0 -0
- vendor/hyperview/core/dataset.py +870 -0
- vendor/hyperview/core/sample.py +95 -0
- vendor/hyperview/core/selection.py +309 -0
- vendor/hyperview/embeddings/__init__.py +31 -0
- vendor/hyperview/embeddings/__pycache__/__init__.cpython-312.pyc +0 -0
- vendor/hyperview/embeddings/__pycache__/compute.cpython-312.pyc +0 -0
- vendor/hyperview/embeddings/__pycache__/engine.cpython-312.pyc +0 -0
- vendor/hyperview/embeddings/compute.py +89 -0
- vendor/hyperview/embeddings/engine.py +391 -0
- vendor/hyperview/embeddings/pipelines.py +265 -0
- vendor/hyperview/embeddings/projection.py +292 -0
- vendor/hyperview/embeddings/providers/__init__.py +7 -0
- vendor/hyperview/embeddings/providers/__pycache__/__init__.cpython-312.pyc +0 -0
- vendor/hyperview/embeddings/providers/__pycache__/lancedb_providers.cpython-312.pyc +0 -0
- vendor/hyperview/embeddings/providers/lancedb_providers.py +374 -0
- vendor/hyperview/server/__init__.py +5 -0
- vendor/hyperview/server/__pycache__/__init__.cpython-312.pyc +0 -0
- vendor/hyperview/server/__pycache__/app.cpython-312.pyc +0 -0
- vendor/hyperview/server/app.py +521 -0
- vendor/hyperview/server/static/404.html +1 -0
- vendor/hyperview/server/static/404/index.html +1 -0
- vendor/hyperview/server/static/__next.__PAGE__.txt +9 -0
- vendor/hyperview/server/static/__next._full.txt +18 -0
- vendor/hyperview/server/static/__next._head.txt +5 -0
- vendor/hyperview/server/static/__next._index.txt +5 -0
- vendor/hyperview/server/static/__next._tree.txt +3 -0
- vendor/hyperview/server/static/_next/static/0EHglxORKBJP1WlpL6tAy/_buildManifest.js +15 -0
- vendor/hyperview/server/static/_next/static/0EHglxORKBJP1WlpL6tAy/_clientMiddlewareManifest.json +1 -0
- vendor/hyperview/server/static/_next/static/0EHglxORKBJP1WlpL6tAy/_ssgManifest.js +1 -0
- vendor/hyperview/server/static/_next/static/chunks/462c5e072cd14e02.css +3 -0
- vendor/hyperview/server/static/_next/static/chunks/567993cf36cd4ab1.js +0 -0
- vendor/hyperview/server/static/_next/static/chunks/86c1fc4cf542f408.js +1 -0
- vendor/hyperview/server/static/_next/static/chunks/8d5a2ef3447cb3ee.js +0 -0
.dockerignore
ADDED
|
@@ -0,0 +1,18 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Git
|
| 2 |
+
.git
|
| 3 |
+
|
| 4 |
+
# Python
|
| 5 |
+
__pycache__
|
| 6 |
+
*.py[cod]
|
| 7 |
+
.venv
|
| 8 |
+
venv
|
| 9 |
+
|
| 10 |
+
# Caches
|
| 11 |
+
.mypy_cache
|
| 12 |
+
.pytest_cache
|
| 13 |
+
|
| 14 |
+
# Local runtime artifacts
|
| 15 |
+
demo_data
|
| 16 |
+
|
| 17 |
+
# Misc
|
| 18 |
+
.DS_Store
|
Dockerfile
ADDED
|
@@ -0,0 +1,56 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
FROM python:3.11-slim

# Native build deps for wheels that compile from source (e.g. pyarrow/lancedb
# fallbacks); curl is also needed at runtime for the HEALTHCHECK below.
RUN apt-get update && apt-get install -y --no-install-recommends \
    build-essential \
    curl \
    git \
    libssl-dev \
    pkg-config \
    && rm -rf /var/lib/apt/lists/*

# Run as a non-root UID-1000 user (Hugging Face Spaces convention).
RUN useradd -m -u 1000 user
USER user

# HF_HOME keeps model/dataset caches inside the user's writable home.
ENV HOME=/home/user \
    PATH=/home/user/.local/bin:$PATH \
    HF_HOME=/home/user/.cache/huggingface \
    PYTHONUNBUFFERED=1 \
    PIP_NO_CACHE_DIR=1

WORKDIR $HOME/app

RUN pip install --upgrade pip

# Pinned runtime dependencies. Torch/torchvision/timm are required by the
# MegaDescriptor embedding provider; the rest back the HyperView server stack.
RUN pip install \
    "fastapi==0.128.0" \
    "uvicorn[standard]==0.40.0" \
    "numpy==2.3.5" \
    "umap-learn==0.5.11" \
    "Pillow==12.1.0" \
    "pydantic==2.12.5" \
    "aiofiles==25.1.0" \
    "datasets==4.5.0" \
    "lancedb==0.27.1" \
    "pyarrow==22.0.0" \
    "torch==2.9.1" \
    "torchvision==0.24.1" \
    "timm==1.0.24"

# Vendored HyperView source snapshot (not the PyPI wheel) plus the entrypoint.
COPY --chown=user vendor ./vendor
COPY --chown=user demo.py ./demo.py

# PYTHONPATH makes the vendored package importable; the HYPERVIEW_* dirs keep
# precomputed artifacts inside the image so runtime startup is fast.
ENV PYTHONPATH=/home/user/app/vendor \
    HYPERVIEW_DATASETS_DIR=/home/user/app/demo_data/datasets \
    HYPERVIEW_MEDIA_DIR=/home/user/app/demo_data/media

# Sanity check that the vendored package imports before the expensive step.
RUN python -c "import hyperview; print('hyperview', hyperview.__version__)"

# Precompute at build time so the Space starts fast.
RUN python -c "from demo import build_dataset; build_dataset()"

EXPOSE 7860

# NOTE(review): Spaces ignores Docker HEALTHCHECK, but it is useful when the
# image is run elsewhere; the long start-period covers server warm-up.
HEALTHCHECK --interval=30s --timeout=10s --start-period=120s --retries=3 \
    CMD curl -f http://localhost:7860/__hyperview__/health || exit 1

CMD ["python", "demo.py"]
|
README.md
CHANGED
|
@@ -1,10 +1,50 @@
|
|
| 1 |
---
|
| 2 |
-
title: HyperView
|
| 3 |
-
emoji:
|
| 4 |
-
colorFrom:
|
| 5 |
-
colorTo:
|
| 6 |
sdk: docker
|
|
|
|
| 7 |
pinned: false
|
| 8 |
---
|
| 9 |
|
| 10 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
---
|
| 2 |
+
title: HyperView-Jaguar-ReID
|
| 3 |
+
emoji: 🐆
|
| 4 |
+
colorFrom: yellow
|
| 5 |
+
colorTo: green
|
| 6 |
sdk: docker
|
| 7 |
+
app_port: 7860
|
| 8 |
pinned: false
|
| 9 |
---
|
| 10 |
|
| 11 |
+
# HyperView - Jaguar Re-ID (MegaDescriptor + Sphere)
|
| 12 |
+
|
| 13 |
+
This Space runs the Jaguar Re-ID dataset through the MegaDescriptor timm
|
| 14 |
+
backbone and renders the result with HyperView's spherical 3D layout.
|
| 15 |
+
|
| 16 |
+
Unlike the Imagenette starter, this folder vendors the current HyperView source
|
| 17 |
+
under `vendor/hyperview/`. The released `hyperview==0.2.0` wheel does not yet
|
| 18 |
+
include the `timm-image` provider or spherical layout support required by this
|
| 19 |
+
demo, so the Space builds against the local source snapshot instead of PyPI.
|
| 20 |
+
|
| 21 |
+
This demo uses:
|
| 22 |
+
|
| 23 |
+
- Hugging Face dataset `hyper3labs/jaguar-re-id`
|
| 24 |
+
- Config `default`
|
| 25 |
+
- Split `train`
|
| 26 |
+
- Image field `image`
|
| 27 |
+
- Label field `label`
|
| 28 |
+
- Sample count `200`
|
| 29 |
+
- Embedding model `hf-hub:BVRA/MegaDescriptor-L-384`
|
| 30 |
+
- Layout `spherical` (3D)
|
| 31 |
+
|
| 32 |
+
## Build-time precomputation
|
| 33 |
+
|
| 34 |
+
The Dockerfile precomputes the dataset, embeddings, and layout during image
|
| 35 |
+
build so the runtime container only needs to launch HyperView.
|
| 36 |
+
|
| 37 |
+
Because MegaDescriptor inference runs during Docker build on CPU, this Space
|
| 38 |
+
keeps the sample count modest and uses a smaller batch size than the local demo
|
| 39 |
+
script to stay within typical Hugging Face build limits.
|
| 40 |
+
|
| 41 |
+
## Vendored source
|
| 42 |
+
|
| 43 |
+
This folder includes a vendored `vendor/hyperview/` snapshot copied from the
|
| 44 |
+
main HyperView repository. Keep that snapshot in sync with the local repo if
|
| 45 |
+
you update the jaguar demo or any unreleased HyperView behavior it depends on.
|
| 46 |
+
|
| 47 |
+
## Deploy source
|
| 48 |
+
|
| 49 |
+
This folder is synchronized to Hugging Face Spaces by GitHub Actions from the
|
| 50 |
+
`hyperview-spaces` deployment repository.
|
__pycache__/demo.cpython-312.pyc
ADDED
|
Binary file (2.69 kB). View file
|
|
|
demo.py
ADDED
|
@@ -0,0 +1,65 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/usr/bin/env python
"""HyperView Jaguar Re-ID Hugging Face Space."""

from __future__ import annotations

import sys
from pathlib import Path

# Hugging Face Spaces route external traffic to 0.0.0.0:7860 inside the container.
SPACE_HOST = "0.0.0.0"
SPACE_PORT = 7860

# On-disk dataset name; presumably resolved under HYPERVIEW_DATASETS_DIR
# (set in the Dockerfile) — TODO confirm against hyperview.Dataset.
DATASET_NAME = "jaguar_reid_megadescriptor_spherical_space"
# Source dataset on the Hugging Face Hub and the fields read from it.
HF_DATASET = "hyper3labs/jaguar-re-id"
HF_CONFIG = "default"
HF_SPLIT = "train"
HF_IMAGE_KEY = "image"
HF_LABEL_KEY = "label"
# Kept modest so CPU-only embedding fits within Space build limits (see README).
SAMPLE_COUNT = 200
# timm hub identifier for the MegaDescriptor backbone.
MODEL_ID = "hf-hub:BVRA/MegaDescriptor-L-384"
BATCH_SIZE = 4

# Make the vendored HyperView snapshot importable when this script is run
# directly (outside Docker, where PYTHONPATH already covers it).
ROOT = Path(__file__).resolve().parent
VENDOR_DIR = ROOT / "vendor"
if str(VENDOR_DIR) not in sys.path:
    sys.path.insert(0, str(VENDOR_DIR))

import hyperview as hv
|
| 28 |
+
|
| 29 |
+
|
| 30 |
+
def build_dataset() -> hv.Dataset:
    """Open (or create) the jaguar dataset, then ensure embeddings and layout.

    Ingestion only happens when the dataset is empty; the embedding and
    layout steps are always invoked (presumably no-ops when already cached
    on disk — verify against the vendored HyperView implementation).
    """
    dataset = hv.Dataset(DATASET_NAME)

    needs_ingest = len(dataset) == 0
    if needs_ingest:
        print(f"Loading {SAMPLE_COUNT} samples from {HF_DATASET} [{HF_CONFIG}] ({HF_SPLIT})...")
        dataset.add_from_huggingface(
            HF_DATASET,
            config=HF_CONFIG,
            split=HF_SPLIT,
            image_key=HF_IMAGE_KEY,
            label_key=HF_LABEL_KEY,
            max_samples=SAMPLE_COUNT,
        )

    print(f"Ensuring MegaDescriptor embeddings ({MODEL_ID})...")
    space_key = dataset.compute_embeddings(
        model=MODEL_ID,
        provider="timm-image",
        batch_size=BATCH_SIZE,
        show_progress=True,
    )

    print("Ensuring spherical layout...")
    dataset.compute_visualization(space_key=space_key, layout="spherical")

    return dataset
|
| 56 |
+
|
| 57 |
+
|
| 58 |
+
def main() -> None:
    """Build (or reopen) the dataset and serve HyperView on the Space's port."""
    ds = build_dataset()
    print(f"Starting HyperView on {SPACE_HOST}:{SPACE_PORT}")
    hv.launch(ds, host=SPACE_HOST, port=SPACE_PORT, open_browser=False)


if __name__ == "__main__":
    main()
|
vendor/hyperview/__init__.py
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""HyperView - Open-source dataset curation with hyperbolic embeddings visualization."""
|
| 2 |
+
|
| 3 |
+
from . import _version as _version
|
| 4 |
+
from . import api as _api
|
| 5 |
+
|
| 6 |
+
Dataset = _api.Dataset
|
| 7 |
+
launch = _api.launch
|
| 8 |
+
__version__ = _version.__version__
|
| 9 |
+
|
| 10 |
+
__all__ = [
|
| 11 |
+
"Dataset",
|
| 12 |
+
"launch",
|
| 13 |
+
"__version__",
|
| 14 |
+
]
|
vendor/hyperview/__pycache__/__init__.cpython-312.pyc
ADDED
|
Binary file (564 Bytes). View file
|
|
|
vendor/hyperview/__pycache__/_version.cpython-312.pyc
ADDED
|
Binary file (885 Bytes). View file
|
|
|
vendor/hyperview/__pycache__/api.cpython-312.pyc
ADDED
|
Binary file (18.2 kB). View file
|
|
|
vendor/hyperview/_version.py
ADDED
|
@@ -0,0 +1,34 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# file generated by setuptools-scm
|
| 2 |
+
# don't change, don't track in version control
|
| 3 |
+
|
| 4 |
+
__all__ = [
|
| 5 |
+
"__version__",
|
| 6 |
+
"__version_tuple__",
|
| 7 |
+
"version",
|
| 8 |
+
"version_tuple",
|
| 9 |
+
"__commit_id__",
|
| 10 |
+
"commit_id",
|
| 11 |
+
]
|
| 12 |
+
|
| 13 |
+
TYPE_CHECKING = False
|
| 14 |
+
if TYPE_CHECKING:
|
| 15 |
+
from typing import Tuple
|
| 16 |
+
from typing import Union
|
| 17 |
+
|
| 18 |
+
VERSION_TUPLE = Tuple[Union[int, str], ...]
|
| 19 |
+
COMMIT_ID = Union[str, None]
|
| 20 |
+
else:
|
| 21 |
+
VERSION_TUPLE = object
|
| 22 |
+
COMMIT_ID = object
|
| 23 |
+
|
| 24 |
+
version: str
|
| 25 |
+
__version__: str
|
| 26 |
+
__version_tuple__: VERSION_TUPLE
|
| 27 |
+
version_tuple: VERSION_TUPLE
|
| 28 |
+
commit_id: COMMIT_ID
|
| 29 |
+
__commit_id__: COMMIT_ID
|
| 30 |
+
|
| 31 |
+
__version__ = version = '0.2.1.dev2+g55532b8e3.d20260307'
|
| 32 |
+
__version_tuple__ = version_tuple = (0, 2, 1, 'dev2', 'g55532b8e3.d20260307')
|
| 33 |
+
|
| 34 |
+
__commit_id__ = commit_id = None
|
vendor/hyperview/api.py
ADDED
|
@@ -0,0 +1,408 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Public API for HyperView."""
|
| 2 |
+
|
| 3 |
+
import json
|
| 4 |
+
import os
|
| 5 |
+
import socket
|
| 6 |
+
import threading
|
| 7 |
+
import time
|
| 8 |
+
import webbrowser
|
| 9 |
+
from dataclasses import dataclass
|
| 10 |
+
from urllib.error import URLError
|
| 11 |
+
from urllib.request import Request, urlopen
|
| 12 |
+
from uuid import uuid4
|
| 13 |
+
|
| 14 |
+
import uvicorn
|
| 15 |
+
|
| 16 |
+
from hyperview.core.dataset import Dataset
|
| 17 |
+
from hyperview.server.app import create_app, set_dataset
|
| 18 |
+
|
| 19 |
+
__all__ = ["Dataset", "launch", "Session"]
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
@dataclass(frozen=True)
class _HealthResponse:
    # Parsed payload of GET /__hyperview__/health. Every field is filled via
    # dict.get() in _read_health, so any of them may be None when the server
    # omits the key (or, for pid, reports a non-int value).
    name: str | None
    session_id: str | None
    dataset: str | None
    pid: int | None
|
| 28 |
+
|
| 29 |
+
|
| 30 |
+
def _can_connect(host: str, port: int, timeout_s: float) -> bool:
|
| 31 |
+
try:
|
| 32 |
+
with socket.create_connection((host, port), timeout=timeout_s):
|
| 33 |
+
return True
|
| 34 |
+
except OSError:
|
| 35 |
+
return False
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
def _try_read_health(url: str, timeout_s: float) -> _HealthResponse | None:
    """Best-effort variant of :func:`_read_health`: returns None on any expected failure."""
    try:
        health = _read_health(url, timeout_s=timeout_s)
    except (URLError, TimeoutError, OSError, ValueError, json.JSONDecodeError):
        return None
    return health
|
| 43 |
+
|
| 44 |
+
|
| 45 |
+
def _read_health(url: str, timeout_s: float) -> _HealthResponse:
    """Fetch the health endpoint at *url* and parse its JSON into a _HealthResponse."""
    req = Request(url, headers={"Accept": "application/json"})
    with urlopen(req, timeout=timeout_s) as resp:
        payload = json.loads(resp.read().decode("utf-8"))

    # pid must be an int to be trusted; anything else is treated as absent.
    raw_pid = payload.get("pid")
    return _HealthResponse(
        name=payload.get("name"),
        session_id=payload.get("session_id"),
        dataset=payload.get("dataset"),
        pid=raw_pid if isinstance(raw_pid, int) else None,
    )
|
| 56 |
+
|
| 57 |
+
|
| 58 |
+
def _resolve_default_launch_layout(dataset: Dataset) -> str:
    """Choose the default visualization layout from the dataset's embedding spaces."""
    geometries = [space.geometry for space in dataset.list_spaces()]

    # Any non-curved space forces the generic 2D euclidean layout.
    if any(g not in ("hyperboloid", "hypersphere") for g in geometries):
        return "euclidean:2d"
    # All spaces are curved; prefer the 3D sphere when any space is spherical.
    if "hypersphere" in geometries:
        return "spherical:3d"
    return "poincare:2d"
|
| 66 |
+
|
| 67 |
+
|
| 68 |
+
class Session:
    """A session for the HyperView visualizer."""

    def __init__(self, dataset: Dataset, host: str, port: int):
        self.dataset = dataset
        self.host = host
        self.port = port
        # Prefer a browser-connectable host for user-facing URLs.
        # When binding to 0.0.0.0, users should connect via 127.0.0.1 locally.
        self.url = f"http://{self._connect_host}:{port}"
        self._server_thread: threading.Thread | None = None
        self._server: uvicorn.Server | None = None
        # Captured by _run_server so start() can surface failures from the
        # background thread.
        self._startup_error: BaseException | None = None
        self.session_id = uuid4().hex

    @property
    def _connect_host(self) -> str:
        # 0.0.0.0 is a bind address, not a connect address.
        return "127.0.0.1" if self.host == "0.0.0.0" else self.host

    @property
    def _health_url(self) -> str:
        return f"http://{self._connect_host}:{self.port}/__hyperview__/health"

    def _run_server(self) -> None:
        # Runs in the daemon thread; any exception (including SystemExit from
        # uvicorn) is stashed for start() to report rather than lost.
        try:
            set_dataset(self.dataset)
            app = create_app(self.dataset, session_id=self.session_id)
            config = uvicorn.Config(app, host=self.host, port=self.port, log_level="warning")
            self._server = uvicorn.Server(config)
            self._server.run()
        except BaseException as exc:
            self._startup_error = exc

    def start(self, background: bool = True) -> None:
        """Start the visualizer server.

        With background=True (default), runs uvicorn in a daemon thread and
        blocks until the health endpoint reports this session's id, raising
        RuntimeError/TimeoutError on conflicts or startup failure. With
        background=False, runs the server in the current thread (blocking).
        """
        if not background:
            self._run_server()
            return

        # Fail fast if something is already listening on this port.
        if _can_connect(self._connect_host, self.port, timeout_s=0.2):
            health = _try_read_health(self._health_url, timeout_s=0.2)
            if health is not None and health.name == "hyperview":
                raise RuntimeError(
                    "HyperView failed to start because the port is already serving "
                    f"HyperView (port={self.port}, session_id={health.session_id}). "
                    "Choose a different port or stop the existing server."
                )

            raise RuntimeError(
                "HyperView failed to start because the port is already in use "
                f"by a non-HyperView service (port={self.port}). Choose a different "
                "port or stop the process listening on that port."
            )

        self._startup_error = None
        self._server_thread = threading.Thread(target=self._run_server, daemon=True)
        self._server_thread.start()

        # Poll the health endpoint until the server identifies itself as this
        # session, some other service answers, or the 5 s deadline expires.
        deadline = time.time() + 5.0
        last_health_error: Exception | None = None

        while time.time() < deadline:
            if self._startup_error is not None:
                raise RuntimeError(
                    f"HyperView server failed to start (port={self.port}): "
                    f"{type(self._startup_error).__name__}: {self._startup_error}"
                )

            if self._server_thread is not None and not self._server_thread.is_alive():
                raise RuntimeError(
                    "HyperView server thread exited during startup. "
                    f"The port may be in use (port={self.port})."
                )

            try:
                health = _read_health(self._health_url, timeout_s=0.2)
            except (URLError, TimeoutError, OSError, ValueError, json.JSONDecodeError) as exc:
                # Not up yet (or transient); remember the error for diagnostics.
                last_health_error = exc
                time.sleep(0.05)
                continue

            if health.name == "hyperview" and health.session_id == self.session_id:
                return

            if health.name == "hyperview":
                raise RuntimeError(
                    "HyperView failed to start because the port is already serving "
                    f"a different HyperView session (port={self.port}, "
                    f"session_id={health.session_id})."
                )

            raise RuntimeError(
                "HyperView failed to start because the port is already serving "
                f"a non-HyperView app (port={self.port})."
            )

        raise TimeoutError(
            "HyperView server did not become ready in time "
            f"(port={self.port}). Last error: {last_health_error}"
        )

    def stop(self) -> None:
        """Stop the visualizer server."""
        # uvicorn checks this flag on its event loop and shuts down gracefully.
        if self._server:
            self._server.should_exit = True

    def show(self, height: int = 800) -> None:
        """Display the visualizer in a notebook.

        In Google Colab, notebook kernels cannot be accessed via localhost.
        Colab exposes kernel ports through a proxy URL (see
        `google.colab.kernel.proxyPort`). This renders a link to the proxied URL
        that opens in a new tab.

        In other notebook environments, it renders a clickable link to the local
        URL and a best-effort JavaScript auto-open.
        """
        # NOTE(review): `height` is currently unused in this method body.
        if _is_colab():
            try:
                from google.colab.output import eval_js  # type: ignore[import-not-found]
                from IPython.display import HTML, display

                proxy_url = eval_js(f"google.colab.kernel.proxyPort({self.port})")
                app_url = str(proxy_url).rstrip("/") + "/"

                display(
                    HTML(
                        "<p>HyperView is running in Colab. "
                        f"<a href=\"{app_url}\" target=\"_blank\" rel=\"noopener noreferrer\">"
                        "Open HyperView in a new tab</a>.</p>"
                    )
                )
                display(HTML(f"<p style=\"font-size:12px;color:#666;\">{app_url}</p>"))
                return
            except Exception:
                # Fall through to the generic notebook behavior.
                pass

        # Default: open in a new browser tab (works well for Jupyter).
        try:
            from IPython.display import HTML, Javascript, display

            display(
                HTML(
                    "<p>HyperView is running. "
                    f"<a href=\"{self.url}\" target=\"_blank\" rel=\"noopener\">Open in a new tab</a>."
                    "</p>"
                )
            )

            # Best-effort auto-open. Some browsers may block popups.
            display(Javascript(f'window.open("{self.url}", "_blank");'))
        except ImportError:
            print(f"IPython not installed. Please visit {self.url} in your browser.")

    def open_browser(self) -> None:
        """Open the visualizer in a browser window."""
        webbrowser.open(self.url)
|
| 227 |
+
|
| 228 |
+
|
| 229 |
+
def launch(
    dataset: Dataset,
    port: int = 6262,
    host: str = "127.0.0.1",
    open_browser: bool = True,
    notebook: bool | None = None,
    height: int = 800,
    reuse_server: bool = False,
) -> Session:
    """Launch the HyperView visualization server.

    Note:
        HyperView needs at least one visualization to display. If no layouts
        exist yet but embedding spaces do, this function computes one default
        layout automatically.

    Args:
        dataset: The dataset to visualize.
        port: Port to run the server on.
        host: Host to bind to.
        open_browser: Whether to open a browser window.
        notebook: Whether to display in a notebook. If None, auto-detects.
        height: Height of the iframe in the notebook.
        reuse_server: If True, and the requested port is already serving HyperView,
            attach to the existing server instead of starting a new one. For safety,
            this will only attach when the existing server reports the same dataset
            name (via `/__hyperview__/health`).

    Returns:
        A Session object.

    Raises:
        RuntimeError: If the port is occupied (by HyperView or anything else)
            and attaching is not possible/allowed.
        ValueError: If the dataset has no visualizations and no embedding spaces.

    Example:
        >>> import hyperview as hv
        >>> dataset = hv.Dataset("my_dataset")
        >>> dataset.add_images_dir("/path/to/images", label_from_folder=True)
        >>> dataset.compute_embeddings(model="openai/clip-vit-base-patch32")
        >>> dataset.compute_visualization()
        >>> hv.launch(dataset)
    """
    if notebook is None:
        # Colab is always a notebook environment, even if _is_notebook() fails to detect it
        notebook = _is_notebook() or _is_colab()

    if _is_colab() and host == "127.0.0.1":
        # Colab port forwarding/proxying is most reliable when the server binds
        # to all interfaces.
        host = "0.0.0.0"

    # Preflight: avoid doing expensive work if the port is already in use.
    # If it's already serving HyperView and reuse_server=True, we can safely attach.
    connect_host = "127.0.0.1" if host == "0.0.0.0" else host
    health_url = f"http://{connect_host}:{port}/__hyperview__/health"

    if _can_connect(connect_host, port, timeout_s=0.2):
        health = _try_read_health(health_url, timeout_s=0.2)
        if health is not None and health.name == "hyperview":
            if not reuse_server:
                raise RuntimeError(
                    "HyperView failed to start because the port is already serving "
                    f"HyperView (port={port}, dataset={health.dataset}, "
                    f"session_id={health.session_id}, pid={health.pid}). "
                    "Choose a different port, stop the existing server, or pass "
                    "reuse_server=True to attach."
                )

            if health.dataset is not None and health.dataset != dataset.name:
                raise RuntimeError(
                    "HyperView refused to attach to the existing server because it is "
                    f"serving a different dataset (port={port}, dataset={health.dataset}). "
                    f"Requested dataset={dataset.name}. Stop the existing server or "
                    "choose a different port."
                )

            # Attach: adopt the running server's session id so health checks match.
            session = Session(dataset, host, port)
            if health.session_id is not None:
                session.session_id = health.session_id

            if notebook:
                if _is_colab():
                    print(
                        f"\nHyperView is already running (Colab, port={session.port}). "
                        "Use the link below to open it."
                    )
                else:
                    print(
                        f"\nHyperView is already running at {session.url} (port={session.port}). "
                        "Opening a new tab..."
                    )
                session.show(height=height)
            else:
                print(f"\nHyperView is already running at {session.url} (port={session.port}).")
                if open_browser:
                    session.open_browser()

            return session

        raise RuntimeError(
            "HyperView failed to start because the port is already in use "
            f"by a non-HyperView service (port={port}). Choose a different "
            "port or stop the process listening on that port."
        )

    layouts = dataset.list_layouts()
    spaces = dataset.list_spaces()

    if not layouts and not spaces:
        raise ValueError(
            "HyperView launch requires at least one visualization or embedding space. "
            "No visualizations or embedding spaces were found. "
            "Call `dataset.compute_embeddings()` and `dataset.compute_visualization()` "
            "or `dataset.set_coords()` before `hv.launch()`."
        )

    if not layouts:
        default_layout = _resolve_default_launch_layout(dataset)

        print(f"No visualizations found. Computing {default_layout} visualization...")
        # Let compute_visualization pick the most appropriate default space.
        dataset.compute_visualization(
            space_key=None,
            layout=default_layout,
        )

    session = Session(dataset, host, port)

    if notebook:
        session.start(background=True)
        if _is_colab():
            print(
                f"\nHyperView is running (Colab, port={session.port}). "
                "Use the link below to open it."
            )
        else:
            print(f"\nHyperView is running at {session.url}. Opening a new tab...")
        session.show(height=height)
    else:
        session.start(background=True)
        # Fix: print the URL before the Ctrl+C hint (the original emitted the
        # hint first, producing a garbled status message).
        print(f"\nHyperView is running at {session.url}")
        print(" Press Ctrl+C to stop.\n")

        if open_browser:
            session.open_browser()

        try:
            while True:
                # Keep the main thread alive so the daemon server thread can run.
                time.sleep(0.25)
                if session._server_thread is not None and not session._server_thread.is_alive():
                    raise RuntimeError("HyperView server stopped unexpectedly.")
        except KeyboardInterrupt:
            pass
        finally:
            session.stop()
            if session._server_thread is not None:
                session._server_thread.join(timeout=2.0)

    return session
|
| 386 |
+
|
| 387 |
+
|
| 388 |
+
def _is_notebook() -> bool:
|
| 389 |
+
"""Check if running in a notebook environment."""
|
| 390 |
+
try:
|
| 391 |
+
from IPython import get_ipython
|
| 392 |
+
except ImportError:
|
| 393 |
+
return False
|
| 394 |
+
|
| 395 |
+
shell = get_ipython()
|
| 396 |
+
return shell is not None and shell.__class__.__name__ == "ZMQInteractiveShell"
|
| 397 |
+
|
| 398 |
+
|
| 399 |
+
def _is_colab() -> bool:
|
| 400 |
+
"""Check if running inside a Google Colab notebook runtime."""
|
| 401 |
+
if os.environ.get("COLAB_RELEASE_TAG"):
|
| 402 |
+
return True
|
| 403 |
+
try:
|
| 404 |
+
import google.colab # type: ignore[import-not-found]
|
| 405 |
+
|
| 406 |
+
return True
|
| 407 |
+
except ImportError:
|
| 408 |
+
return False
|
vendor/hyperview/cli.py
ADDED
|
@@ -0,0 +1,362 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Command-line interface for HyperView."""
|
| 2 |
+
|
| 3 |
+
from __future__ import annotations
|
| 4 |
+
|
| 5 |
+
import argparse
|
| 6 |
+
|
| 7 |
+
from hyperview import Dataset, launch
|
| 8 |
+
from hyperview.core.dataset import parse_visualization_layout
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
def _build_parser() -> argparse.ArgumentParser:
|
| 12 |
+
parser = argparse.ArgumentParser(
|
| 13 |
+
prog="hyperview",
|
| 14 |
+
description="HyperView - Dataset visualization with hyperbolic embeddings",
|
| 15 |
+
)
|
| 16 |
+
|
| 17 |
+
parser.add_argument(
|
| 18 |
+
"--dataset",
|
| 19 |
+
type=str,
|
| 20 |
+
default=None,
|
| 21 |
+
help=(
|
| 22 |
+
"Dataset name in persistent storage. Required unless "
|
| 23 |
+
"--dataset-json is provided."
|
| 24 |
+
),
|
| 25 |
+
)
|
| 26 |
+
parser.add_argument(
|
| 27 |
+
"--dataset-json",
|
| 28 |
+
type=str,
|
| 29 |
+
help="Path to exported dataset JSON file (loads samples into memory)",
|
| 30 |
+
)
|
| 31 |
+
parser.add_argument(
|
| 32 |
+
"--hf-dataset",
|
| 33 |
+
type=str,
|
| 34 |
+
help="HuggingFace dataset ID to ingest before launch (e.g. uoft-cs/cifar10)",
|
| 35 |
+
)
|
| 36 |
+
parser.add_argument(
|
| 37 |
+
"--split",
|
| 38 |
+
type=str,
|
| 39 |
+
default=None,
|
| 40 |
+
help="HuggingFace split to use (required with --hf-dataset)",
|
| 41 |
+
)
|
| 42 |
+
parser.add_argument(
|
| 43 |
+
"--hf-config",
|
| 44 |
+
type=str,
|
| 45 |
+
default=None,
|
| 46 |
+
help="Optional HuggingFace subset/configuration to use",
|
| 47 |
+
)
|
| 48 |
+
parser.add_argument(
|
| 49 |
+
"--image-key",
|
| 50 |
+
type=str,
|
| 51 |
+
default=None,
|
| 52 |
+
help="Image column key for HuggingFace ingestion (required with --hf-dataset)",
|
| 53 |
+
)
|
| 54 |
+
parser.add_argument(
|
| 55 |
+
"--label-key",
|
| 56 |
+
type=str,
|
| 57 |
+
default=None,
|
| 58 |
+
help="Label column key for HuggingFace ingestion (optional)",
|
| 59 |
+
)
|
| 60 |
+
parser.add_argument(
|
| 61 |
+
"--label-names-key",
|
| 62 |
+
type=str,
|
| 63 |
+
default=None,
|
| 64 |
+
help="Optional dataset info key containing label names",
|
| 65 |
+
)
|
| 66 |
+
parser.add_argument(
|
| 67 |
+
"--images-dir",
|
| 68 |
+
type=str,
|
| 69 |
+
help="Local directory of images to ingest before launch",
|
| 70 |
+
)
|
| 71 |
+
parser.add_argument(
|
| 72 |
+
"--label-from-folder",
|
| 73 |
+
action="store_true",
|
| 74 |
+
help="When using --images-dir, derive label from parent folder name",
|
| 75 |
+
)
|
| 76 |
+
parser.add_argument(
|
| 77 |
+
"--samples",
|
| 78 |
+
type=int,
|
| 79 |
+
default=None,
|
| 80 |
+
help="Maximum number of ingested samples (omit to load all)",
|
| 81 |
+
)
|
| 82 |
+
parser.add_argument(
|
| 83 |
+
"--hf-streaming",
|
| 84 |
+
action="store_true",
|
| 85 |
+
help=(
|
| 86 |
+
"Stream HuggingFace rows instead of materializing the full split first. "
|
| 87 |
+
"Useful for loading subsets without eager full-split downloads."
|
| 88 |
+
),
|
| 89 |
+
)
|
| 90 |
+
parser.add_argument(
|
| 91 |
+
"--shuffle",
|
| 92 |
+
action="store_true",
|
| 93 |
+
help="Shuffle HuggingFace dataset before sampling",
|
| 94 |
+
)
|
| 95 |
+
parser.add_argument(
|
| 96 |
+
"--seed",
|
| 97 |
+
type=int,
|
| 98 |
+
default=42,
|
| 99 |
+
help="Random seed used when --shuffle is enabled (default: 42)",
|
| 100 |
+
)
|
| 101 |
+
parser.add_argument(
|
| 102 |
+
"--hf-shuffle-buffer-size",
|
| 103 |
+
type=int,
|
| 104 |
+
default=1000,
|
| 105 |
+
help=(
|
| 106 |
+
"Shuffle buffer size used with --hf-streaming and --shuffle. "
|
| 107 |
+
"Streaming shuffle is approximate and trades larger buffers for more read-ahead."
|
| 108 |
+
),
|
| 109 |
+
)
|
| 110 |
+
|
| 111 |
+
parser.add_argument(
|
| 112 |
+
"--model",
|
| 113 |
+
type=str,
|
| 114 |
+
default=None,
|
| 115 |
+
help=(
|
| 116 |
+
"Embedding model to compute before launch (e.g. openai/clip-vit-base-patch32). "
|
| 117 |
+
"If omitted, existing embedding spaces are reused."
|
| 118 |
+
),
|
| 119 |
+
)
|
| 120 |
+
parser.add_argument(
|
| 121 |
+
"--method",
|
| 122 |
+
choices=["umap"],
|
| 123 |
+
default="umap",
|
| 124 |
+
help="Projection method (currently only 'umap')",
|
| 125 |
+
)
|
| 126 |
+
parser.add_argument(
|
| 127 |
+
"--layout",
|
| 128 |
+
action="append",
|
| 129 |
+
dest="layouts",
|
| 130 |
+
metavar="GEOMETRY[:2d|3d]",
|
| 131 |
+
help=(
|
| 132 |
+
"Visualization layout to compute. Repeat this flag to request multiple layouts, "
|
| 133 |
+
"for example '--layout euclidean --layout spherical'. "
|
| 134 |
+
"Omitting the suffix defaults to 2D for euclidean/poincare and 3D for spherical. "
|
| 135 |
+
"If omitted, HyperView picks one sensible default layout for the selected embedding space."
|
| 136 |
+
),
|
| 137 |
+
)
|
| 138 |
+
parser.add_argument(
|
| 139 |
+
"--n-neighbors",
|
| 140 |
+
type=int,
|
| 141 |
+
default=15,
|
| 142 |
+
help="UMAP n_neighbors (default: 15)",
|
| 143 |
+
)
|
| 144 |
+
parser.add_argument(
|
| 145 |
+
"--min-dist",
|
| 146 |
+
type=float,
|
| 147 |
+
default=0.1,
|
| 148 |
+
help="UMAP min_dist (default: 0.1)",
|
| 149 |
+
)
|
| 150 |
+
parser.add_argument(
|
| 151 |
+
"--metric",
|
| 152 |
+
type=str,
|
| 153 |
+
default="cosine",
|
| 154 |
+
help="UMAP metric (default: cosine)",
|
| 155 |
+
)
|
| 156 |
+
parser.add_argument(
|
| 157 |
+
"--force-layout",
|
| 158 |
+
action="store_true",
|
| 159 |
+
help="Force layout recomputation even if projection already exists",
|
| 160 |
+
)
|
| 161 |
+
|
| 162 |
+
parser.add_argument(
|
| 163 |
+
"--port",
|
| 164 |
+
type=int,
|
| 165 |
+
default=6262,
|
| 166 |
+
help="Port to run the server on (default: 6262)",
|
| 167 |
+
)
|
| 168 |
+
parser.add_argument(
|
| 169 |
+
"--host",
|
| 170 |
+
type=str,
|
| 171 |
+
default="127.0.0.1",
|
| 172 |
+
help="Host to bind the server to (default: 127.0.0.1)",
|
| 173 |
+
)
|
| 174 |
+
parser.add_argument(
|
| 175 |
+
"--no-browser",
|
| 176 |
+
action="store_true",
|
| 177 |
+
help="Do not open a browser window automatically",
|
| 178 |
+
)
|
| 179 |
+
parser.add_argument(
|
| 180 |
+
"--reuse-server",
|
| 181 |
+
action="store_true",
|
| 182 |
+
help=(
|
| 183 |
+
"If the port is already serving HyperView, attach instead of failing. "
|
| 184 |
+
"For safety, this only attaches when the existing server reports the same dataset name."
|
| 185 |
+
),
|
| 186 |
+
)
|
| 187 |
+
|
| 188 |
+
return parser
|
| 189 |
+
|
| 190 |
+
|
| 191 |
+
def _validate_args(parser: argparse.ArgumentParser, args: argparse.Namespace) -> None:
|
| 192 |
+
if args.layouts:
|
| 193 |
+
canonical_layouts: list[str] = []
|
| 194 |
+
seen_layouts: set[str] = set()
|
| 195 |
+
for layout_spec in args.layouts:
|
| 196 |
+
try:
|
| 197 |
+
geometry, layout_dimension = parse_visualization_layout(layout_spec)
|
| 198 |
+
except ValueError as exc:
|
| 199 |
+
parser.error(str(exc))
|
| 200 |
+
|
| 201 |
+
canonical_layout = f"{geometry}:{layout_dimension}d"
|
| 202 |
+
if canonical_layout in seen_layouts:
|
| 203 |
+
continue
|
| 204 |
+
seen_layouts.add(canonical_layout)
|
| 205 |
+
canonical_layouts.append(canonical_layout)
|
| 206 |
+
|
| 207 |
+
args.layouts = canonical_layouts
|
| 208 |
+
|
| 209 |
+
if args.hf_dataset and args.images_dir:
|
| 210 |
+
parser.error("Use either --hf-dataset or --images-dir, not both.")
|
| 211 |
+
|
| 212 |
+
if args.dataset_json and (args.hf_dataset or args.images_dir):
|
| 213 |
+
parser.error("--dataset-json cannot be combined with --hf-dataset or --images-dir.")
|
| 214 |
+
|
| 215 |
+
if args.dataset_json and args.dataset:
|
| 216 |
+
parser.error("Use either --dataset or --dataset-json, not both.")
|
| 217 |
+
|
| 218 |
+
if not args.dataset and not args.dataset_json:
|
| 219 |
+
parser.error(
|
| 220 |
+
"Provide --dataset (persistent dataset) or --dataset-json (exported dataset file)."
|
| 221 |
+
)
|
| 222 |
+
|
| 223 |
+
if args.hf_dataset:
|
| 224 |
+
if not args.split:
|
| 225 |
+
parser.error("--split is required when using --hf-dataset.")
|
| 226 |
+
if not args.image_key:
|
| 227 |
+
parser.error("--image-key is required when using --hf-dataset.")
|
| 228 |
+
if args.hf_shuffle_buffer_size < 1:
|
| 229 |
+
parser.error("--hf-shuffle-buffer-size must be at least 1.")
|
| 230 |
+
|
| 231 |
+
|
| 232 |
+
def _print_ingestion_result(added: int, skipped: int) -> None:
|
| 233 |
+
if skipped > 0:
|
| 234 |
+
print(f"Loaded {added} samples ({skipped} already present)")
|
| 235 |
+
else:
|
| 236 |
+
print(f"Loaded {added} samples")
|
| 237 |
+
|
| 238 |
+
|
| 239 |
+
def _ingest_huggingface(dataset: Dataset, args: argparse.Namespace, dataset_name: str) -> None:
    """Pull rows from a HuggingFace dataset into *dataset* and report counts."""
    subset_note = f" [{args.hf_config}]" if args.hf_config else ""
    print(f"Loading HuggingFace dataset {dataset_name}{subset_note}...")
    # All ingestion knobs come straight from the CLI namespace.
    added, skipped = dataset.add_from_huggingface(
        dataset_name,
        config=args.hf_config,
        split=args.split,
        image_key=args.image_key,
        label_key=args.label_key,
        label_names_key=args.label_names_key,
        max_samples=args.samples,
        shuffle=args.shuffle,
        seed=args.seed,
        streaming=args.hf_streaming,
        shuffle_buffer_size=args.hf_shuffle_buffer_size,
    )
    _print_ingestion_result(added, skipped)
|
| 256 |
+
|
| 257 |
+
|
| 258 |
+
def _prepare_dataset(args: argparse.Namespace) -> Dataset:
    """Open or ingest the dataset selected by the CLI flags.

    A --dataset-json snapshot is loaded fully into memory; otherwise the named
    persistent dataset is opened and optionally populated from HuggingFace or
    a local image directory.
    """
    if args.dataset_json:
        print(f"Loading dataset from {args.dataset_json}...")
        loaded = Dataset.load(args.dataset_json)
        print(f"Loaded {len(loaded)} samples")
        return loaded

    dataset = Dataset(args.dataset)
    print(f"Using dataset '{dataset.name}' ({len(dataset)} samples)")

    if args.hf_dataset:
        _ingest_huggingface(dataset, args, args.hf_dataset)
    elif args.images_dir:
        print(f"Loading images from {args.images_dir}...")
        added, skipped = dataset.add_images_dir(
            args.images_dir,
            label_from_folder=args.label_from_folder,
        )
        _print_ingestion_result(added, skipped)

    return dataset
|
| 279 |
+
|
| 280 |
+
|
| 281 |
+
def _resolve_default_layouts(
|
| 282 |
+
dataset: Dataset,
|
| 283 |
+
space_key: str | None,
|
| 284 |
+
) -> list[str]:
|
| 285 |
+
spaces = dataset.list_spaces()
|
| 286 |
+
selected = next((space for space in spaces if space.space_key == space_key), None)
|
| 287 |
+
|
| 288 |
+
if selected is not None:
|
| 289 |
+
if selected.geometry == "hyperboloid":
|
| 290 |
+
return ["poincare:2d"]
|
| 291 |
+
if selected.geometry == "hypersphere":
|
| 292 |
+
return ["spherical:3d"]
|
| 293 |
+
return ["euclidean:2d"]
|
| 294 |
+
|
| 295 |
+
if any(space.geometry not in ("hyperboloid", "hypersphere") for space in spaces):
|
| 296 |
+
return ["euclidean:2d"]
|
| 297 |
+
if any(space.geometry == "hypersphere" for space in spaces):
|
| 298 |
+
return ["spherical:3d"]
|
| 299 |
+
return ["poincare:2d"]
|
| 300 |
+
|
| 301 |
+
def _compute_layouts(dataset: Dataset, args: argparse.Namespace, space_key: str | None) -> None:
|
| 302 |
+
target_layouts = args.layouts or _resolve_default_layouts(dataset, space_key)
|
| 303 |
+
|
| 304 |
+
print("Computing visualizations...")
|
| 305 |
+
for target_layout in target_layouts:
|
| 306 |
+
dataset.compute_visualization(
|
| 307 |
+
space_key=space_key,
|
| 308 |
+
method=args.method,
|
| 309 |
+
layout=target_layout,
|
| 310 |
+
n_neighbors=args.n_neighbors,
|
| 311 |
+
min_dist=args.min_dist,
|
| 312 |
+
metric=args.metric,
|
| 313 |
+
force=args.force_layout,
|
| 314 |
+
)
|
| 315 |
+
print("Visualizations ready")
|
| 316 |
+
|
| 317 |
+
|
| 318 |
+
def _prepare_embeddings_and_layouts(dataset: Dataset, args: argparse.Namespace) -> None:
|
| 319 |
+
has_spaces = len(dataset.list_spaces()) > 0
|
| 320 |
+
|
| 321 |
+
if args.model is not None:
|
| 322 |
+
print(f"Computing embeddings with {args.model}...")
|
| 323 |
+
space_key = dataset.compute_embeddings(model=args.model, show_progress=True)
|
| 324 |
+
print("Embeddings computed")
|
| 325 |
+
_compute_layouts(dataset, args, space_key)
|
| 326 |
+
return
|
| 327 |
+
|
| 328 |
+
if args.force_layout:
|
| 329 |
+
if not has_spaces:
|
| 330 |
+
raise ValueError(
|
| 331 |
+
"No embedding spaces found. Provide --model to compute embeddings first."
|
| 332 |
+
)
|
| 333 |
+
_compute_layouts(dataset, args, space_key=None)
|
| 334 |
+
return
|
| 335 |
+
|
| 336 |
+
if not has_spaces:
|
| 337 |
+
raise ValueError(
|
| 338 |
+
"No embedding spaces found. Provide --model to compute embeddings first."
|
| 339 |
+
)
|
| 340 |
+
|
| 341 |
+
|
| 342 |
+
def main():
    """Entry point for the ``hyperview`` console command."""
    parser = _build_parser()
    cli_args = parser.parse_args()
    _validate_args(parser, cli_args)

    # Ingest/open data and make sure embeddings + layouts exist before serving.
    dataset = _prepare_dataset(cli_args)
    _prepare_embeddings_and_layouts(dataset, cli_args)

    launch(
        dataset,
        port=cli_args.port,
        host=cli_args.host,
        open_browser=not cli_args.no_browser,
        reuse_server=cli_args.reuse_server,
    )


if __name__ == "__main__":
    main()
|
vendor/hyperview/core/__init__.py
ADDED
|
@@ -0,0 +1,6 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Core data structures for HyperView."""
|
| 2 |
+
|
| 3 |
+
from hyperview.core.dataset import Dataset
|
| 4 |
+
from hyperview.core.sample import Sample
|
| 5 |
+
|
| 6 |
+
__all__ = ["Dataset", "Sample"]
|
vendor/hyperview/core/__pycache__/__init__.cpython-312.pyc
ADDED
|
Binary file (428 Bytes). View file
|
|
|
vendor/hyperview/core/__pycache__/dataset.cpython-312.pyc
ADDED
|
Binary file (33.9 kB). View file
|
|
|
vendor/hyperview/core/__pycache__/sample.cpython-312.pyc
ADDED
|
Binary file (5.55 kB). View file
|
|
|
vendor/hyperview/core/__pycache__/selection.cpython-312.pyc
ADDED
|
Binary file (15.1 kB). View file
|
|
|
vendor/hyperview/core/dataset.py
ADDED
|
@@ -0,0 +1,870 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Dataset class for managing collections of samples."""
|
| 2 |
+
|
| 3 |
+
from __future__ import annotations
|
| 4 |
+
|
| 5 |
+
import hashlib
|
| 6 |
+
import json
|
| 7 |
+
import math
|
| 8 |
+
import threading
|
| 9 |
+
import time
|
| 10 |
+
import uuid
|
| 11 |
+
from collections.abc import Callable, Iterator
|
| 12 |
+
from pathlib import Path
|
| 13 |
+
from typing import Any, cast
|
| 14 |
+
|
| 15 |
+
import numpy as np
|
| 16 |
+
from datasets import DownloadConfig, load_dataset
|
| 17 |
+
from PIL import Image
|
| 18 |
+
|
| 19 |
+
from hyperview.core.sample import Sample
|
| 20 |
+
from hyperview.storage.backend import StorageBackend
|
| 21 |
+
from hyperview.storage.schema import (
|
| 22 |
+
make_layout_key,
|
| 23 |
+
normalize_layout_dimension,
|
| 24 |
+
parse_layout_dimension,
|
| 25 |
+
)
|
| 26 |
+
|
| 27 |
+
|
| 28 |
+
# Layout geometry used when a caller does not request one explicitly.
DEFAULT_VISUALIZATION_LAYOUT = "euclidean"
# Geometries accepted by parse_visualization_layout().
VALID_VISUALIZATION_GEOMETRIES = ("euclidean", "poincare", "spherical")
|
| 30 |
+
|
| 31 |
+
|
| 32 |
+
def _format_elapsed(seconds: float) -> str:
|
| 33 |
+
if seconds < 60:
|
| 34 |
+
return f"{seconds:.1f}s"
|
| 35 |
+
total_seconds = int(round(seconds))
|
| 36 |
+
minutes, secs = divmod(total_seconds, 60)
|
| 37 |
+
if minutes < 60:
|
| 38 |
+
return f"{minutes}m {secs:02d}s"
|
| 39 |
+
hours, minutes = divmod(minutes, 60)
|
| 40 |
+
return f"{hours}h {minutes:02d}m {secs:02d}s"
|
| 41 |
+
|
| 42 |
+
|
| 43 |
+
def _format_eta(seconds: float) -> str:
|
| 44 |
+
if not math.isfinite(seconds) or seconds < 0:
|
| 45 |
+
return "unknown"
|
| 46 |
+
return _format_elapsed(seconds)
|
| 47 |
+
|
| 48 |
+
|
| 49 |
+
def _fallback_huggingface_fingerprint(
|
| 50 |
+
dataset_name: str,
|
| 51 |
+
config_name: str,
|
| 52 |
+
split: str,
|
| 53 |
+
version: str | None,
|
| 54 |
+
) -> str:
|
| 55 |
+
identity = f"{dataset_name}:{config_name}:{split}:{version or 'unknown'}"
|
| 56 |
+
return hashlib.md5(identity.encode()).hexdigest()
|
| 57 |
+
|
| 58 |
+
|
| 59 |
+
def parse_visualization_layout(layout: str) -> tuple[str, int]:
    """Parse a public visualization layout spec like ``euclidean:3d``.

    Returns ``(geometry, dimension)``. Omitting the ``:Nd`` suffix defaults
    to 2D for euclidean/poincare layouts and to 3D for spherical layouts.

    Raises:
        ValueError: for an empty spec, an unknown geometry, a malformed
            dimension suffix, or a Poincare layout requested in 3D.
    """
    normalized = layout.strip().lower()
    if not normalized:
        raise ValueError("layout must be a non-empty string")

    if ":" in normalized:
        geometry, dimension_spec = normalized.rsplit(":", 1)
    else:
        geometry = normalized
        # Spherical layouts default to 3D; everything else to 2D.
        dimension_spec = "3d" if normalized == "spherical" else "2d"

    # Tolerate whitespace around the ':' separator.
    geometry = geometry.strip()
    dimension_spec = dimension_spec.strip()

    if geometry not in VALID_VISUALIZATION_GEOMETRIES:
        raise ValueError(
            "layout geometry must be one of "
            f"{VALID_VISUALIZATION_GEOMETRIES}, got '{geometry}'"
        )

    if dimension_spec not in ("2d", "3d"):
        raise ValueError(
            "layout must use the form '<geometry>:2d' or '<geometry>:3d', "
            f"got '{layout}'"
        )

    layout_dimension = normalize_layout_dimension(int(dimension_spec[0]))
    if geometry == "poincare" and layout_dimension != 2:
        raise ValueError("Poincare layouts currently require 2D output.")

    return geometry, layout_dimension
|
| 95 |
+
|
| 96 |
+
|
| 97 |
+
class Dataset:
|
| 98 |
+
"""A collection of samples with support for embeddings and visualization.
|
| 99 |
+
|
| 100 |
+
Datasets are automatically persisted to LanceDB by default, providing:
|
| 101 |
+
- Automatic persistence (no need to call save())
|
| 102 |
+
- Vector similarity search
|
| 103 |
+
- Efficient storage and retrieval
|
| 104 |
+
|
| 105 |
+
Embeddings are stored separately from samples, keyed by model_id.
|
| 106 |
+
Layouts (2D/3D projections) are stored per layout_key (space + method).
|
| 107 |
+
"""
|
| 108 |
+
|
| 109 |
+
def __init__(
|
| 110 |
+
self,
|
| 111 |
+
name: str | None = None,
|
| 112 |
+
persist: bool = True,
|
| 113 |
+
storage: StorageBackend | None = None,
|
| 114 |
+
):
|
| 115 |
+
self.name = name or f"dataset_{uuid.uuid4().hex[:8]}"
|
| 116 |
+
|
| 117 |
+
if storage is not None:
|
| 118 |
+
self._storage = storage
|
| 119 |
+
elif persist:
|
| 120 |
+
from hyperview.storage import LanceDBBackend, StorageConfig
|
| 121 |
+
|
| 122 |
+
config = StorageConfig.default()
|
| 123 |
+
self._storage = LanceDBBackend(self.name, config)
|
| 124 |
+
else:
|
| 125 |
+
from hyperview.storage import MemoryBackend
|
| 126 |
+
|
| 127 |
+
self._storage = MemoryBackend(self.name)
|
| 128 |
+
|
| 129 |
+
self.last_requested_sample_ids: list[str] = []
|
| 130 |
+
|
| 131 |
+
    def __len__(self) -> int:
        """Return the number of samples held by the storage backend."""
        return len(self._storage)
|
| 133 |
+
|
| 134 |
+
    def __iter__(self) -> Iterator[Sample]:
        """Iterate over all samples; ordering is whatever the backend yields."""
        return iter(self._storage)
|
| 136 |
+
|
| 137 |
+
def __getitem__(self, sample_id: str) -> Sample:
|
| 138 |
+
sample = self._storage.get_sample(sample_id)
|
| 139 |
+
if sample is None:
|
| 140 |
+
raise KeyError(sample_id)
|
| 141 |
+
return sample
|
| 142 |
+
|
| 143 |
+
    def add_sample(self, sample: Sample) -> None:
        """Add a sample to the dataset (idempotent).

        Persistence and duplicate handling are delegated to the storage
        backend.
        """
        self._storage.add_sample(sample)
|
| 146 |
+
|
| 147 |
+
def _ingest_samples(
|
| 148 |
+
self,
|
| 149 |
+
samples: list[Sample],
|
| 150 |
+
*,
|
| 151 |
+
skip_existing: bool = True,
|
| 152 |
+
) -> tuple[int, int]:
|
| 153 |
+
"""Shared ingestion helper for batch sample insertion."""
|
| 154 |
+
self.last_requested_sample_ids = [sample.id for sample in samples]
|
| 155 |
+
|
| 156 |
+
if not samples:
|
| 157 |
+
return 0, 0
|
| 158 |
+
|
| 159 |
+
skipped = 0
|
| 160 |
+
if skip_existing:
|
| 161 |
+
all_ids = [sample.id for sample in samples]
|
| 162 |
+
existing_ids = self._storage.get_existing_ids(all_ids)
|
| 163 |
+
if existing_ids:
|
| 164 |
+
samples = [sample for sample in samples if sample.id not in existing_ids]
|
| 165 |
+
skipped = len(all_ids) - len(samples)
|
| 166 |
+
|
| 167 |
+
if not samples:
|
| 168 |
+
return 0, skipped
|
| 169 |
+
|
| 170 |
+
self._storage.add_samples_batch(samples)
|
| 171 |
+
|
| 172 |
+
return len(samples), skipped
|
| 173 |
+
|
| 174 |
+
def add_image(
|
| 175 |
+
self,
|
| 176 |
+
filepath: str,
|
| 177 |
+
label: str | None = None,
|
| 178 |
+
metadata: dict[str, Any] | None = None,
|
| 179 |
+
sample_id: str | None = None,
|
| 180 |
+
) -> Sample:
|
| 181 |
+
"""Add a single image to the dataset."""
|
| 182 |
+
if sample_id is None:
|
| 183 |
+
sample_id = hashlib.md5(filepath.encode()).hexdigest()[:12]
|
| 184 |
+
|
| 185 |
+
sample = Sample(
|
| 186 |
+
id=sample_id,
|
| 187 |
+
filepath=filepath,
|
| 188 |
+
label=label,
|
| 189 |
+
metadata=metadata or {},
|
| 190 |
+
)
|
| 191 |
+
self.add_sample(sample)
|
| 192 |
+
return sample
|
| 193 |
+
|
| 194 |
+
def add_images_dir(
|
| 195 |
+
self,
|
| 196 |
+
directory: str,
|
| 197 |
+
extensions: tuple[str, ...] = (".jpg", ".jpeg", ".png", ".webp"),
|
| 198 |
+
label_from_folder: bool = False,
|
| 199 |
+
recursive: bool = True,
|
| 200 |
+
skip_existing: bool = True,
|
| 201 |
+
) -> tuple[int, int]:
|
| 202 |
+
"""Add all images from a directory."""
|
| 203 |
+
directory_path = Path(directory)
|
| 204 |
+
if not directory_path.exists():
|
| 205 |
+
raise ValueError(f"Directory does not exist: {directory_path}")
|
| 206 |
+
|
| 207 |
+
samples = []
|
| 208 |
+
pattern = "**/*" if recursive else "*"
|
| 209 |
+
|
| 210 |
+
for path in directory_path.glob(pattern):
|
| 211 |
+
if path.is_file() and path.suffix.lower() in extensions:
|
| 212 |
+
label = path.parent.name if label_from_folder else None
|
| 213 |
+
sample_id = hashlib.md5(str(path).encode()).hexdigest()[:12]
|
| 214 |
+
sample = Sample(
|
| 215 |
+
id=sample_id,
|
| 216 |
+
filepath=str(path),
|
| 217 |
+
label=label,
|
| 218 |
+
metadata={},
|
| 219 |
+
)
|
| 220 |
+
samples.append(sample)
|
| 221 |
+
|
| 222 |
+
return self._ingest_samples(samples, skip_existing=skip_existing)
|
| 223 |
+
|
| 224 |
+
def add_from_huggingface(
    self,
    dataset_name: str,
    config: str | None = None,
    split: str = "train",
    image_key: str = "img",
    label_key: str | None = "fine_label",
    label_names_key: str | None = None,
    max_samples: int | None = None,
    shuffle: bool = False,
    seed: int = 42,
    streaming: bool = False,
    shuffle_buffer_size: int = 1000,
    show_progress: bool = True,
    skip_existing: bool = True,
    image_format: str = "auto",
) -> tuple[int, int]:
    """Load samples from a Hugging Face dataset.

    Each example's image is decoded, written to the local media directory
    (skipped when the target file already exists), wrapped in a ``Sample``
    with provenance metadata, and finally ingested via ``_ingest_samples``.

    Args:
        dataset_name: Hub dataset identifier (e.g. ``"cifar100"``).
        config: Optional dataset configuration name.
        split: Split to load (default ``"train"``).
        image_key: Column holding the image.
        label_key: Column holding the label index/value, or None for no labels.
        label_names_key: Optional feature whose ``.names`` maps label indices to strings.
        max_samples: Cap on the number of samples to load (None = all).
        shuffle: Shuffle before truncating to ``max_samples``.
        seed: RNG seed for shuffling.
        streaming: Use the datasets streaming API instead of a full download.
        shuffle_buffer_size: Buffer size for streaming shuffle (must be >= 1).
        show_progress: Print periodic progress lines.
        skip_existing: Skip samples whose IDs already exist in storage.
        image_format: ``"auto"`` (keep JPEG, else PNG), ``"jpeg"``, or anything
            else for PNG.

    Returns:
        (num_added, num_skipped) from ingestion.

    Raises:
        ValueError: If ``shuffle_buffer_size`` < 1.
    """
    from hyperview.storage import StorageConfig

    # Key used to carry the original dataset index through streaming
    # transforms (shuffle/take reorder examples, so enumerate() alone
    # would not identify the source row).
    source_index_key = "__hyperview_source_index"

    def attach_huggingface_source_index(
        example: dict[str, Any],
        index: int,
    ) -> dict[str, Any]:
        # Copy so we never mutate the dataset's own example dict.
        augmented = dict(example)
        augmented[source_index_key] = index
        return augmented

    if shuffle_buffer_size < 1:
        raise ValueError("shuffle_buffer_size must be >= 1")

    if streaming:
        ds = cast(
            Any,
            load_dataset(
                dataset_name,
                name=config,
                split=split,
                streaming=True,
            ),
        )
    else:
        # Prefer the local cache (no network); fall back to a normal
        # (possibly downloading) load on any failure.
        try:
            ds = cast(
                Any,
                load_dataset(
                    dataset_name,
                    name=config,
                    split=split,
                    download_config=DownloadConfig(local_files_only=True),
                ),
            )
        except Exception:
            ds = cast(Any, load_dataset(dataset_name, name=config, split=split))

    # Streaming datasets have no _fingerprint; a fallback is derived below.
    source_fingerprint = ds._fingerprint if hasattr(ds, "_fingerprint") else None

    # Resolve an index -> human-readable-label mapping, if available.
    # NOTE(review): when both label_key and label_names_key are set but
    # label_names_key is absent from ds.features, the elif branch is NOT
    # tried, so label_names stays None — confirm this is intended.
    label_names = None
    if label_key and label_names_key:
        if label_names_key in ds.features:
            label_names = ds.features[label_names_key].names
    elif label_key:
        if hasattr(ds.features[label_key], "names"):
            label_names = ds.features[label_key].names

    config_name = getattr(ds.info, "config_name", None) or "default"
    version = str(ds.info.version) if ds.info.version else None
    fingerprint_source = source_fingerprint or _fallback_huggingface_fingerprint(
        dataset_name,
        config_name,
        split,
        version,
    )
    # Short fingerprint embedded in sample IDs to disambiguate dataset revisions.
    fingerprint = fingerprint_source[:8]

    total: int | None
    selected_indices: list[int] | None = None
    iterator: Iterator[Any]
    if streaming:
        stream = ds
        # Drop unused columns early to minimize decode work per example.
        if hasattr(stream, "select_columns"):
            columns = [image_key]
            if label_key:
                columns.append(label_key)
            # dict.fromkeys dedupes while preserving order (image_key == label_key case).
            stream = stream.select_columns(list(dict.fromkeys(columns)))
        stream = stream.map(attach_huggingface_source_index, with_indices=True)
        if shuffle:
            if hasattr(stream, "reshard"):
                stream = stream.reshard()
            stream = stream.shuffle(seed=seed, buffer_size=shuffle_buffer_size)
        if max_samples is not None:
            total = max_samples
            iterator = iter(stream.take(max_samples))
        else:
            # Unknown length: progress reporting switches to count-only mode.
            total = None
            iterator = iter(stream)
    else:
        dataset_size = len(ds)
        total = dataset_size if max_samples is None else min(dataset_size, max_samples)

        if shuffle:
            rng = np.random.default_rng(seed)
            # Shuffle the full index space, then truncate — a fair sample of the split.
            selected_indices = rng.permutation(dataset_size)[:total].tolist()
            ds = ds.select(selected_indices)
        elif max_samples is not None:
            selected_indices = list(range(total))
            ds = ds.select(selected_indices)

        iterator = (ds[i] for i in range(total))

    storage_config = StorageConfig.default()
    media_dir = storage_config.get_huggingface_media_dir(dataset_name, split)

    samples: list[Sample] = []

    if show_progress:
        if total is None:
            print(f"Loading samples from {dataset_name} via streaming...", flush=True)
        else:
            mode_suffix = " via streaming" if streaming else ""
            print(
                f"Loading {total} samples from {dataset_name}{mode_suffix}...",
                flush=True,
            )

    started_at = time.perf_counter()
    last_report_at = started_at
    # Report roughly every 5% of the total, clamped to [25, 200]; 50 when unknown.
    report_every = 50 if total is None else max(25, min(200, total // 20 or 25))
    loaded = 0
    progress_stop = threading.Event()

    def emit_progress(now: float, *, waiting_for_first_sample: bool = False) -> None:
        # Reads `loaded`/`total` from the enclosing frame; called from both
        # the main loop and the heartbeat thread.
        elapsed = max(now - started_at, 1e-9)
        if waiting_for_first_sample:
            if total is None:
                print(
                    "Still waiting for the first streamed sample "
                    f"(elapsed {_format_elapsed(elapsed)})",
                    flush=True,
                )
            else:
                print(
                    "Still waiting for the first streamed sample "
                    f"(0/{total}, elapsed {_format_elapsed(elapsed)})",
                    flush=True,
                )
            return

        rate = loaded / elapsed
        if total is None:
            print(
                f"Loaded {loaded} samples "
                f"({rate:.1f}/s, elapsed {_format_elapsed(elapsed)})",
                flush=True,
            )
            return

        remaining = total - loaded
        eta_seconds = remaining / rate if rate > 0 else float("inf")
        print(
            f"Loaded {loaded}/{total} samples "
            f"({loaded / total:.0%}, {rate:.1f}/s, "
            f"elapsed {_format_elapsed(elapsed)}, ETA {_format_eta(eta_seconds)})",
            flush=True,
        )

    # Streaming can stall for a long time before the first example arrives;
    # a daemon heartbeat thread keeps the user informed every 10 seconds.
    heartbeat: threading.Thread | None = None
    if show_progress and streaming:
        def heartbeat_reporter() -> None:
            while not progress_stop.wait(10.0):
                now = time.perf_counter()
                if loaded == 0:
                    emit_progress(now, waiting_for_first_sample=True)
                elif now - last_report_at >= 10.0:
                    emit_progress(now)

        heartbeat = threading.Thread(
            target=heartbeat_reporter,
            name="hyperview-hf-progress",
            daemon=True,
        )
        heartbeat.start()

    try:
        for i, item in enumerate(iterator):
            if streaming:
                # Recover the original row index injected by the map() above.
                source_index = int(item.pop(source_index_key, i))
            else:
                source_index = selected_indices[i] if selected_indices is not None else i
            image = item[image_key]

            if isinstance(image, Image.Image):
                pil_image = image
            else:
                # e.g. numpy arrays or array-likes from raw features.
                pil_image = Image.fromarray(np.asarray(image))

            label = None
            if label_key and label_key in item:
                label_idx = item[label_key]
                if label_names and isinstance(label_idx, int):
                    label = label_names[label_idx]
                else:
                    label = str(label_idx)

            # Sample IDs are reproducible across runs: dataset + config +
            # revision fingerprint + split + original row index.
            safe_name = dataset_name.replace("/", "_")
            sample_id = f"{safe_name}_{config_name}_{fingerprint}_{split}_{source_index}"

            if image_format == "auto":
                # Keep JPEGs as JPEG (no recompress format change); everything else → PNG.
                original_format = getattr(pil_image, "format", None)
                if original_format in ("JPEG", "JPG"):
                    save_format = "JPEG"
                    ext = ".jpg"
                else:
                    save_format = "PNG"
                    ext = ".png"
            elif image_format == "jpeg":
                save_format = "JPEG"
                ext = ".jpg"
            else:
                save_format = "PNG"
                ext = ".png"

            metadata = {
                "source": dataset_name,
                "config": config_name,
                "split": split,
                "index": source_index,
                "fingerprint": source_fingerprint,
                "version": version,
            }

            image_path = media_dir / f"{sample_id}{ext}"
            if not image_path.exists():
                # JPEG cannot store alpha/palette; normalize those modes
                # (and grayscale) to RGB before saving.
                if save_format == "JPEG" or pil_image.mode in ("RGBA", "P", "L"):
                    pil_image = pil_image.convert("RGB")
                pil_image.save(image_path, format=save_format)

            sample = Sample(
                id=sample_id,
                filepath=str(image_path),
                label=label,
                metadata=metadata,
            )

            samples.append(sample)
            loaded += 1

            if not show_progress:
                continue

            # Report on: first sample, last sample, every `report_every`
            # samples, or at least once every 5 seconds.
            now = time.perf_counter()
            should_report = loaded == 1
            if total is not None and loaded == total:
                should_report = True
            if loaded % report_every == 0:
                should_report = True
            if now - last_report_at >= 5.0:
                should_report = True
            if not should_report:
                continue

            emit_progress(now)
            last_report_at = now
    finally:
        # Always stop the heartbeat, even on an exception mid-iteration.
        progress_stop.set()
        if heartbeat is not None:
            heartbeat.join(timeout=0.1)

    if total is None:
        total = loaded

    num_added, skipped = self._ingest_samples(samples, skip_existing=skip_existing)

    if show_progress:
        print(
            f"Prepared {loaded} samples in {_format_elapsed(time.perf_counter() - started_at)}",
            flush=True,
        )
        print(f"Images saved to: {media_dir}", flush=True)
        if skipped > 0:
            print(f"Skipped {skipped} existing samples", flush=True)

    return num_added, skipped
| 510 |
+
def compute_embeddings(
    self,
    model: str,
    *,
    provider: str | None = None,
    checkpoint: str | None = None,
    batch_size: int = 32,
    sample_ids: list[str] | None = None,
    show_progress: bool = True,
    **provider_kwargs: Any,
) -> str:
    """Compute embeddings for samples that are still missing them.

    Results land in a dedicated embedding space identified by the spec
    (provider + model + checkpoint + extra kwargs).

    Args:
        model: Model identifier (required). A HuggingFace model_id such as
            'openai/clip-vit-base-patch32' selects embed-anything; a
            hyper-models name such as 'hycoclip-vit-s' selects hyperbolic
            embeddings.
        provider: Explicit provider identifier. When omitted it is
            auto-detected: 'hyper-models' if the model name is known to
            hyper-models, otherwise 'embed-anything'. See
            `hyperview.embeddings.list_embedding_providers()`.
        checkpoint: Checkpoint path/URL (hf://... or local) for weight-only models.
        batch_size: Processing batch size.
        sample_ids: Optional subset of sample IDs to ensure embeddings for;
            when omitted, all samples missing embeddings are processed.
        show_progress: Show a progress bar.
        **provider_kwargs: Extra kwargs forwarded to the embedding function.

    Returns:
        The space_key of the embedding space.

    Raises:
        ValueError: If no model was given.
    """
    if not model:
        raise ValueError(
            "model is required. Examples: 'openai/clip-vit-base-patch32' (CLIP), "
            "'hycoclip-vit-s' (hyperbolic). See hyperview.embeddings.list_embedding_providers()."
        )

    from hyperview.embeddings.engine import EmbeddingSpec
    from hyperview.embeddings.pipelines import compute_embeddings

    chosen_provider = provider
    if chosen_provider is None:
        # Default to embed-anything; upgrade to hyper-models when the
        # optional dependency is installed and recognizes this model name.
        chosen_provider = "embed-anything"
        try:
            import hyper_models

            if model in hyper_models.list_models():
                chosen_provider = "hyper-models"
        except ImportError:
            pass

    spec = EmbeddingSpec(
        provider=chosen_provider,
        model_id=model,
        checkpoint=checkpoint,
        provider_kwargs=provider_kwargs,
    )

    space_key, _num_computed, _num_skipped = compute_embeddings(
        storage=self._storage,
        spec=spec,
        batch_size=batch_size,
        sample_ids=sample_ids,
        show_progress=show_progress,
    )
    return space_key
| 578 |
+
def compute_visualization(
    self,
    space_key: str | None = None,
    method: str = "umap",
    layout: str = DEFAULT_VISUALIZATION_LAYOUT,
    n_neighbors: int = 15,
    min_dist: float = 0.1,
    metric: str = "cosine",
    force: bool = False,
) -> str:
    """Project an embedding space into low-dimensional coordinates.

    Args:
        space_key: Embedding space to project; None picks the first available.
        method: Projection method ('umap' supported).
        layout: Layout spec such as 'euclidean', 'euclidean:3d', or 'spherical'.
            Without a suffix, Euclidean/Poincare default to 2D and spherical to 3D.
        n_neighbors: UMAP neighbor count.
        min_dist: UMAP minimum distance.
        metric: UMAP distance metric.
        force: Recompute even when the layout already exists.

    Returns:
        The layout_key of the computed layout.
    """
    from hyperview.embeddings.pipelines import compute_layout

    # Split the layout spec (e.g. 'euclidean:3d') into geometry + dimension.
    geometry, layout_dimension = parse_visualization_layout(layout)

    layout_key = compute_layout(
        storage=self._storage,
        space_key=space_key,
        method=method,
        geometry=geometry,
        layout_dimension=layout_dimension,
        n_neighbors=n_neighbors,
        min_dist=min_dist,
        metric=metric,
        force=force,
        show_progress=True,
    )
    return layout_key
|
| 620 |
+
def list_spaces(self) -> list[Any]:
    """Return every embedding space registered for this dataset."""
    spaces = self._storage.list_spaces()
    return spaces
|
| 624 |
+
def list_layouts(self) -> list[Any]:
    """Return every layout registered for this dataset (LayoutInfo objects)."""
    layouts = self._storage.list_layouts()
    return layouts
|
| 628 |
+
def find_similar(
    self,
    sample_id: str,
    k: int = 10,
    space_key: str | None = None,
) -> list[tuple[Sample, float]]:
    """Return the k nearest neighbours of an existing sample.

    Args:
        sample_id: ID of the query sample.
        k: How many neighbours to return.
        space_key: Embedding space to search; None means the first available.

    Returns:
        (sample, distance) pairs ordered nearest-first.
    """
    neighbours = self._storage.find_similar(sample_id, k, space_key)
    return neighbours
|
| 646 |
+
def find_similar_by_vector(
    self,
    vector: list[float],
    k: int = 10,
    space_key: str | None = None,
) -> list[tuple[Sample, float]]:
    """Return the k nearest neighbours of an arbitrary query vector.

    Args:
        vector: Query vector (same dimensionality as the space).
        k: How many neighbours to return.
        space_key: Embedding space to search; None means the first available.

    Returns:
        (sample, distance) pairs ordered nearest-first.
    """
    neighbours = self._storage.find_similar_by_vector(vector, k, space_key)
    return neighbours
|
| 664 |
+
def set_coords(
    self,
    geometry: str,
    ids: list[str],
    coords: np.ndarray | list[list[float]],
) -> str:
    """Set precomputed coordinates for visualization.

    Use this when you have precomputed projections and want to skip
    embedding computation. Useful for smoke tests or external projections.

    Args:
        geometry: "euclidean", "poincare", or "spherical".
        ids: List of sample IDs, one per coordinate row.
        coords: (N, D) array of coordinates.

    Returns:
        The layout_key for the stored coordinates.

    Raises:
        ValueError: On an unknown geometry, a malformed coords array, a
            mismatch between len(ids) and the number of coordinate rows,
            or a non-2D poincare layout.

    Example:
        >>> dataset.set_coords("euclidean", ["s0", "s1"], [[0.1, 0.2], [0.3, 0.4]])
        >>> dataset.set_coords("spherical", ["s0", "s1"], [[0.1, 0.2, 0.3], [0.3, 0.4, 0.5]])
        >>> hv.launch(dataset)
    """
    if geometry not in ("euclidean", "poincare", "spherical"):
        raise ValueError(
            f"geometry must be 'euclidean', 'poincare', or 'spherical', got '{geometry}'"
        )

    coords_arr = np.asarray(coords, dtype=np.float32)
    if coords_arr.ndim != 2 or coords_arr.shape[1] < 2:
        raise ValueError(f"coords must be (N, D) with D>=2, got shape {coords_arr.shape}")

    # Fix: reject misaligned input early. Previously a length mismatch was
    # passed through silently and could store coordinates against the wrong IDs.
    if len(ids) != coords_arr.shape[0]:
        raise ValueError(
            f"ids and coords must have the same length, got {len(ids)} ids "
            f"and {coords_arr.shape[0]} coordinate rows"
        )

    layout_dimension = normalize_layout_dimension(int(coords_arr.shape[1]))
    if geometry == "poincare" and layout_dimension != 2:
        raise ValueError("poincare precomputed layouts must be 2D")

    # Ensure a synthetic space exists (required by launch())
    space_key = f"precomputed_{layout_dimension}d"
    if not any(s.space_key == space_key for s in self._storage.list_spaces()):
        precomputed_config = {
            "provider": "precomputed",
            "geometry": "unknown",  # Precomputed coords don't have a source embedding geometry
        }
        self._storage.ensure_space(space_key, dim=layout_dimension, config=precomputed_config)

    layout_key = make_layout_key(
        space_key,
        method="precomputed",
        geometry=geometry,
        layout_dimension=layout_dimension,
    )

    # Ensure layout registry entry exists
    self._storage.ensure_layout(
        layout_key=layout_key,
        space_key=space_key,
        method="precomputed",
        geometry=geometry,
        params=None,
    )

    self._storage.add_layout_coords(layout_key, list(ids), coords_arr)
    return layout_key
|
| 729 |
+
@property
def samples(self) -> list[Sample]:
    """All samples in the dataset, materialized as a list."""
    everything = self._storage.get_all_samples()
    return everything
|
| 734 |
+
@property
def labels(self) -> list[str]:
    """The distinct labels present in the dataset."""
    distinct = self._storage.get_unique_labels()
    return distinct
|
| 739 |
+
def filter(self, predicate: Callable[[Sample], bool]) -> list[Sample]:
    """Return the samples for which *predicate* is truthy."""
    matching = self._storage.filter(predicate)
    return matching
|
| 743 |
+
def get_samples_paginated(
    self,
    offset: int = 0,
    limit: int = 100,
    label: str | None = None,
) -> tuple[list[Sample], int]:
    """Fetch one page of samples plus the total count.

    Delegates to storage so the full sample set never has to be loaded
    into memory; this backs the server API's pagination.
    """
    page, total = self._storage.get_samples_paginated(offset=offset, limit=limit, label=label)
    return page, total
|
| 756 |
+
def get_samples_by_ids(self, sample_ids: list[str]) -> list[Sample]:
    """Fetch several samples at once by ID.

    The result preserves the input ordering; IDs with no matching sample
    are simply omitted.
    """
    found = self._storage.get_samples_by_ids(sample_ids)
    return found
|
| 763 |
+
def get_visualization_data(
    self,
    layout_key: str,
) -> tuple[list[str], list[str | None], np.ndarray]:
    """Return aligned (ids, labels, coords) for one layout.

    Layout points whose sample ID has no entry in the label lookup are
    dropped so the three outputs stay index-aligned.
    """
    layout_dimension = parse_layout_dimension(layout_key)
    nothing = np.empty((0, layout_dimension), dtype=np.float32)

    layout_ids, layout_coords = self._storage.get_layout_coords(layout_key)
    if not layout_ids:
        return [], [], nothing

    labels_by_id = self._storage.get_labels_by_ids(layout_ids)

    # Keep only points with a known sample, preserving layout order.
    kept: list[tuple[str, str | None, np.ndarray]] = []
    for position, sample_id in enumerate(layout_ids):
        if sample_id not in labels_by_id:
            continue
        kept.append((sample_id, labels_by_id[sample_id], layout_coords[position]))

    if not kept:
        return [], [], nothing

    ids = [row[0] for row in kept]
    labels = [row[1] for row in kept]
    coords = np.asarray([row[2] for row in kept], dtype=np.float32)
    return ids, labels, coords
|
| 792 |
+
def get_lasso_candidates_aabb(
    self,
    *,
    layout_key: str,
    x_min: float,
    x_max: float,
    y_min: float,
    y_max: float,
    label_filter: str | None = None,
) -> tuple[list[str], np.ndarray]:
    """Return candidate (id, xy) rows inside an axis-aligned bounding box.

    Thin delegation to storage; the box is the lasso's pre-filter before
    exact polygon testing.
    """
    candidates = self._storage.get_lasso_candidates_aabb(
        layout_key=layout_key,
        x_min=x_min,
        x_max=x_max,
        y_min=y_min,
        y_max=y_max,
        label_filter=label_filter,
    )
    return candidates
| 812 |
+
def save(self, filepath: str, include_thumbnails: bool = True) -> None:
    """Export the dataset (name + sample records) to a JSON file.

    Args:
        filepath: Path to save the JSON file.
        include_thumbnails: Whether to generate, cache, and embed
            base64 thumbnails in the export.
    """
    samples = self._storage.get_all_samples()
    if include_thumbnails:
        # Materialize thumbnails up front so the export embeds them.
        for s in samples:
            s.cache_thumbnail()

    data = {
        "name": self.name,
        "samples": [
            {
                "id": s.id,
                "filepath": s.filepath,
                "label": s.label,
                "metadata": s.metadata,
                "thumbnail_base64": s.thumbnail_base64 if include_thumbnails else None,
            }
            for s in samples
        ],
    }
    # Fix: write UTF-8 explicitly — the platform default encoding can fail
    # on non-ASCII labels/metadata (e.g. cp1252 on Windows) and would make
    # the file unreadable by load() on another machine.
    with open(filepath, "w", encoding="utf-8") as f:
        json.dump(data, f)
|
| 840 |
+
@classmethod
def load(cls, filepath: str, persist: bool = False) -> Dataset:
    """Load a dataset previously exported with :meth:`save`.

    Args:
        filepath: Path to the JSON file.
        persist: If True, persist the loaded data to LanceDB.
            If False (default), keep in memory only.

    Returns:
        Dataset instance populated with the file's samples.
    """
    # Fix: read UTF-8 explicitly so exports containing non-ASCII
    # labels/metadata decode correctly regardless of the platform's
    # locale encoding (JSON files are conventionally UTF-8).
    with open(filepath, encoding="utf-8") as f:
        data = json.load(f)

    dataset = cls(name=data["name"], persist=persist)

    # Rebuild Sample objects; optional fields fall back to sensible defaults.
    samples = [
        Sample(
            id=s_data["id"],
            filepath=s_data["filepath"],
            label=s_data.get("label"),
            metadata=s_data.get("metadata", {}),
            thumbnail_base64=s_data.get("thumbnail_base64"),
        )
        for s_data in data["samples"]
    ]

    dataset._storage.add_samples_batch(samples)
    return dataset
vendor/hyperview/core/sample.py
ADDED
|
@@ -0,0 +1,95 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Sample class representing a single data point in a dataset."""
|
| 2 |
+
|
| 3 |
+
import base64
|
| 4 |
+
import io
|
| 5 |
+
from pathlib import Path
|
| 6 |
+
from typing import Any
|
| 7 |
+
|
| 8 |
+
from PIL import Image
|
| 9 |
+
from pydantic import BaseModel, Field
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
class Sample(BaseModel):
    """A single sample in a HyperView dataset.

    Samples are pure metadata containers. Embeddings and layouts are stored
    separately in dedicated tables (per embedding space / per layout).
    """

    id: str = Field(..., description="Unique identifier for the sample")
    filepath: str = Field(..., description="Path to the image file")
    label: str | None = Field(default=None, description="Label for the sample")
    metadata: dict[str, Any] = Field(default_factory=dict, description="Additional metadata")
    thumbnail_base64: str | None = Field(default=None, description="Cached thumbnail as base64")
    width: int | None = Field(default=None, description="Image width in pixels")
    height: int | None = Field(default=None, description="Image height in pixels")

    # Permit non-pydantic types in annotations (pydantic v2 model_config dict).
    model_config = {"arbitrary_types_allowed": True}

    @property
    def filename(self) -> str:
        """Get the filename from the filepath."""
        return Path(self.filepath).name

    def load_image(self) -> Image.Image:
        """Load the image from disk."""
        return Image.open(self.filepath)

    def get_thumbnail(self, size: tuple[int, int] = (128, 128)) -> Image.Image:
        """Get a thumbnail of the image. Also captures original dimensions.

        Side effect: populates ``width``/``height`` from the loaded image
        if they were not already set.
        """
        img = self.load_image()
        # Capture original dimensions while we have the image loaded
        if self.width is None or self.height is None:
            self.width, self.height = img.size
        # PIL's thumbnail() resizes in place, preserving aspect ratio.
        img.thumbnail(size, Image.Resampling.LANCZOS)
        return img

    def _encode_thumbnail(self, size: tuple[int, int] = (128, 128)) -> str:
        """Encode thumbnail as base64 JPEG."""
        thumb = self.get_thumbnail(size)
        # JPEG cannot store alpha or palette modes; normalize to RGB first.
        if thumb.mode in ("RGBA", "P"):
            thumb = thumb.convert("RGB")
        buffer = io.BytesIO()
        thumb.save(buffer, format="JPEG", quality=85)
        return base64.b64encode(buffer.getvalue()).decode("utf-8")

    def get_thumbnail_base64(self, size: tuple[int, int] = (128, 128)) -> str:
        """Get thumbnail as base64 encoded string.

        Note: when a thumbnail is already cached, the cached version is
        returned and the ``size`` argument has no effect.
        """
        return self.thumbnail_base64 or self._encode_thumbnail(size)

    def cache_thumbnail(self, size: tuple[int, int] = (128, 128)) -> None:
        """Cache the thumbnail as base64 for persistence."""
        if self.thumbnail_base64 is None:
            self.thumbnail_base64 = self._encode_thumbnail(size)

    def to_api_dict(self, include_thumbnail: bool = True) -> dict[str, Any]:
        """Convert to dictionary for API response."""
        # Ensure dimensions are populated (loads image if needed but not cached)
        if self.width is None or self.height is None:
            self.ensure_dimensions()

        data = {
            "id": self.id,
            "filepath": self.filepath,
            "filename": self.filename,
            "label": self.label,
            "metadata": self.metadata,
            "width": self.width,
            "height": self.height,
        }
        if include_thumbnail:
            data["thumbnail"] = self.get_thumbnail_base64()
        return data

    def ensure_dimensions(self) -> None:
        """Load image dimensions if not already set.

        Best-effort: a missing or unreadable image leaves the fields None
        rather than raising.
        """
        if self.width is None or self.height is None:
            try:
                img = self.load_image()
                self.width, self.height = img.size
            except Exception:
                # If image can't be loaded, leave as None
                pass
| 95 |
+
|
vendor/hyperview/core/selection.py
ADDED
|
@@ -0,0 +1,309 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Selection / geometry helpers.
|
| 2 |
+
|
| 3 |
+
This module contains small, backend-agnostic utilities used by selection
|
| 4 |
+
endpoints for both 2D and 3D lasso selection.
|
| 5 |
+
"""
|
| 6 |
+
|
| 7 |
+
from __future__ import annotations
|
| 8 |
+
|
| 9 |
+
import math
|
| 10 |
+
|
| 11 |
+
import numpy as np
|
| 12 |
+
from pydantic import BaseModel
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
def points_in_polygon(points_xy: np.ndarray, polygon_xy: np.ndarray) -> np.ndarray:
    """Even-odd (ray casting) point-in-polygon test, vectorized over points.

    Args:
        points_xy: (m, 2) array of point coordinates.
        polygon_xy: (n, 2) array of polygon vertices.

    Returns:
        Length-m boolean mask; True where the point lies inside the polygon.

    Notes:
        Points exactly on the boundary may be classified either way due to
        floating-point ties (acceptable for lasso selection).
    """
    n_vertices = polygon_xy.shape[0]
    # Fewer than three vertices cannot enclose any area.
    if n_vertices < 3:
        return np.zeros((points_xy.shape[0],), dtype=bool)

    px = points_xy[:, 0]
    py = points_xy[:, 1]
    vx = polygon_xy[:, 0]
    vy = polygon_xy[:, 1]

    inside = np.zeros((points_xy.shape[0],), dtype=bool)

    # Walk each edge (prev -> curr), toggling parity for points whose
    # leftward horizontal ray crosses the edge.
    prev = n_vertices - 1
    for curr in range(n_vertices):
        x_curr = vx[curr]
        y_curr = vy[curr]
        x_prev = vx[prev]
        y_prev = vy[prev]

        # Half-open y-interval so shared vertices aren't counted twice.
        crosses = (y_curr > py) != (y_prev > py)

        dy = y_prev - y_curr
        # dy == 0 makes `crosses` all-False; the epsilon only silences
        # divide-by-zero warnings for that dead branch.
        crossing_x = (x_prev - x_curr) * (py - y_curr) / (dy + 1e-30) + x_curr

        inside ^= crosses & (px < crossing_x)
        prev = curr

    return inside
|
| 58 |
+
|
| 59 |
+
class OrbitViewState3D(BaseModel):
    """Orbit camera state for server-side 3D lasso projection.

    Consumed by ``_build_mvp_for_orbit`` to rebuild the renderer's camera
    matrices on the server.
    """

    yaw: float  # Horizontal orbit angle around the target, in radians.
    pitch: float  # Vertical orbit angle, in radians.
    distance: float  # Distance from the eye to the orbit target.
    target_x: float  # Orbit target position, world space.
    target_y: float
    target_z: float
    ortho_scale: float  # Half-height of the orthographic view volume (zoom level).
|
| 69 |
+
|
| 70 |
+
|
| 71 |
+
def _normalize_vec(v: np.ndarray) -> np.ndarray:
|
| 72 |
+
norm = float(np.linalg.norm(v))
|
| 73 |
+
if norm < 1e-9:
|
| 74 |
+
return np.zeros_like(v, dtype=np.float32)
|
| 75 |
+
return (v / norm).astype(np.float32)
|
| 76 |
+
|
| 77 |
+
|
| 78 |
+
def _mat4_look_at(eye: np.ndarray, target: np.ndarray, up: np.ndarray) -> np.ndarray:
|
| 79 |
+
"""Column-major lookAt matrix matching hyper-scatter 3D renderer math."""
|
| 80 |
+
z_axis = _normalize_vec(eye - target)
|
| 81 |
+
if float(np.linalg.norm(z_axis)) < 1e-9:
|
| 82 |
+
z_axis = np.array([0.0, 0.0, 1.0], dtype=np.float32)
|
| 83 |
+
|
| 84 |
+
x_axis = _normalize_vec(np.cross(up, z_axis))
|
| 85 |
+
if float(np.linalg.norm(x_axis)) < 1e-9:
|
| 86 |
+
x_axis = np.array([1.0, 0.0, 0.0], dtype=np.float32)
|
| 87 |
+
|
| 88 |
+
y_axis = np.cross(z_axis, x_axis).astype(np.float32)
|
| 89 |
+
|
| 90 |
+
out = np.zeros(16, dtype=np.float32)
|
| 91 |
+
out[0] = x_axis[0]
|
| 92 |
+
out[1] = y_axis[0]
|
| 93 |
+
out[2] = z_axis[0]
|
| 94 |
+
out[3] = 0.0
|
| 95 |
+
|
| 96 |
+
out[4] = x_axis[1]
|
| 97 |
+
out[5] = y_axis[1]
|
| 98 |
+
out[6] = z_axis[1]
|
| 99 |
+
out[7] = 0.0
|
| 100 |
+
|
| 101 |
+
out[8] = x_axis[2]
|
| 102 |
+
out[9] = y_axis[2]
|
| 103 |
+
out[10] = z_axis[2]
|
| 104 |
+
out[11] = 0.0
|
| 105 |
+
|
| 106 |
+
out[12] = -float(np.dot(x_axis, eye))
|
| 107 |
+
out[13] = -float(np.dot(y_axis, eye))
|
| 108 |
+
out[14] = -float(np.dot(z_axis, eye))
|
| 109 |
+
out[15] = 1.0
|
| 110 |
+
return out
|
| 111 |
+
|
| 112 |
+
|
| 113 |
+
def _mat4_ortho(
|
| 114 |
+
left: float,
|
| 115 |
+
right: float,
|
| 116 |
+
bottom: float,
|
| 117 |
+
top: float,
|
| 118 |
+
near: float,
|
| 119 |
+
far: float,
|
| 120 |
+
) -> np.ndarray:
|
| 121 |
+
"""Column-major orthographic matrix matching hyper-scatter 3D renderer math."""
|
| 122 |
+
rl = right - left
|
| 123 |
+
tb = top - bottom
|
| 124 |
+
fn = far - near
|
| 125 |
+
|
| 126 |
+
if abs(rl) < 1e-12 or abs(tb) < 1e-12 or abs(fn) < 1e-12:
|
| 127 |
+
raise ValueError("Invalid orthographic projection bounds")
|
| 128 |
+
|
| 129 |
+
out = np.zeros(16, dtype=np.float32)
|
| 130 |
+
out[0] = 2.0 / rl
|
| 131 |
+
out[5] = 2.0 / tb
|
| 132 |
+
out[10] = -2.0 / fn
|
| 133 |
+
out[12] = -(right + left) / rl
|
| 134 |
+
out[13] = -(top + bottom) / tb
|
| 135 |
+
out[14] = -(far + near) / fn
|
| 136 |
+
out[15] = 1.0
|
| 137 |
+
return out
|
| 138 |
+
|
| 139 |
+
|
| 140 |
+
def _mat4_multiply(a: np.ndarray, b: np.ndarray) -> np.ndarray:
|
| 141 |
+
"""Column-major matrix multiply matching hyper-scatter mat4Multiply."""
|
| 142 |
+
out = np.zeros(16, dtype=np.float32)
|
| 143 |
+
for col in range(4):
|
| 144 |
+
for row in range(4):
|
| 145 |
+
out[col * 4 + row] = (
|
| 146 |
+
a[0 * 4 + row] * b[col * 4 + 0]
|
| 147 |
+
+ a[1 * 4 + row] * b[col * 4 + 1]
|
| 148 |
+
+ a[2 * 4 + row] * b[col * 4 + 2]
|
| 149 |
+
+ a[3 * 4 + row] * b[col * 4 + 3]
|
| 150 |
+
)
|
| 151 |
+
return out
|
| 152 |
+
|
| 153 |
+
|
| 154 |
+
def _build_mvp_for_orbit(
    view: OrbitViewState3D,
    coords: np.ndarray,
    viewport_width: int,
    viewport_height: int,
) -> np.ndarray:
    """Compose the orthographic MVP matrix for an orbit camera.

    Mirrors the client renderer's camera: the eye orbits the target at
    ``view.distance`` along the yaw/pitch direction in a Y-up world.
    """
    # Unit direction pointing from the target back toward the eye.
    pitch_cos = math.cos(view.pitch)
    orbit_dir = np.array(
        [
            pitch_cos * math.sin(view.yaw),
            math.sin(view.pitch),
            pitch_cos * math.cos(view.yaw),
        ],
        dtype=np.float32,
    )
    target = np.array([view.target_x, view.target_y, view.target_z], dtype=np.float32)
    eye = target + orbit_dir * float(view.distance)

    view_matrix = _mat4_look_at(eye, target, np.array([0.0, 1.0, 0.0], dtype=np.float32))

    # Orthographic frustum: half-height comes from the zoom level, half-width
    # from the viewport aspect ratio (height clamped to avoid division by 0).
    half_height = max(0.01, float(view.ortho_scale))
    half_width = half_height * (float(viewport_width) / float(max(1, viewport_height)))

    # Depth range generous enough to cover the entire point cloud.
    centroid = np.mean(coords, axis=0)
    radius = max(float(np.max(np.linalg.norm(coords - centroid, axis=1))), 0.25)
    near = 0.01
    far = max(near + 1.0, float(view.distance) + radius * 6.0 + 10.0)

    projection = _mat4_ortho(-half_width, half_width, -half_height, half_height, near, far)
    return _mat4_multiply(projection, view_matrix)
|
| 185 |
+
|
| 186 |
+
|
| 187 |
+
def _project_points_3d_to_screen(
|
| 188 |
+
mvp: np.ndarray,
|
| 189 |
+
coords: np.ndarray,
|
| 190 |
+
width: int,
|
| 191 |
+
height: int,
|
| 192 |
+
) -> tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray]:
|
| 193 |
+
x = coords[:, 0]
|
| 194 |
+
y = coords[:, 1]
|
| 195 |
+
z = coords[:, 2]
|
| 196 |
+
|
| 197 |
+
clip_x = mvp[0] * x + mvp[4] * y + mvp[8] * z + mvp[12]
|
| 198 |
+
clip_y = mvp[1] * x + mvp[5] * y + mvp[9] * z + mvp[13]
|
| 199 |
+
clip_z = mvp[2] * x + mvp[6] * y + mvp[10] * z + mvp[14]
|
| 200 |
+
clip_w = mvp[3] * x + mvp[7] * y + mvp[11] * z + mvp[15]
|
| 201 |
+
|
| 202 |
+
inv_w = np.zeros_like(clip_w)
|
| 203 |
+
np.divide(1.0, clip_w, out=inv_w, where=np.abs(clip_w) > 1e-12)
|
| 204 |
+
ndc_x = clip_x * inv_w
|
| 205 |
+
ndc_y = clip_y * inv_w
|
| 206 |
+
ndc_z = clip_z * inv_w
|
| 207 |
+
|
| 208 |
+
finite = np.isfinite(ndc_x) & np.isfinite(ndc_y) & np.isfinite(ndc_z)
|
| 209 |
+
in_clip = (
|
| 210 |
+
finite
|
| 211 |
+
& (ndc_x >= -1.0)
|
| 212 |
+
& (ndc_x <= 1.0)
|
| 213 |
+
& (ndc_y >= -1.0)
|
| 214 |
+
& (ndc_y <= 1.0)
|
| 215 |
+
& (ndc_z >= -1.0)
|
| 216 |
+
& (ndc_z <= 1.0)
|
| 217 |
+
)
|
| 218 |
+
|
| 219 |
+
screen_x = (ndc_x * 0.5 + 0.5) * float(width)
|
| 220 |
+
screen_y = (1.0 - (ndc_y * 0.5 + 0.5)) * float(height)
|
| 221 |
+
depth = ndc_z * 0.5 + 0.5
|
| 222 |
+
|
| 223 |
+
pixel_x = np.floor(screen_x).astype(np.int64)
|
| 224 |
+
pixel_y = np.floor(screen_y).astype(np.int64)
|
| 225 |
+
pixel_ok = (
|
| 226 |
+
in_clip
|
| 227 |
+
& (pixel_x >= 0)
|
| 228 |
+
& (pixel_x < width)
|
| 229 |
+
& (pixel_y >= 0)
|
| 230 |
+
& (pixel_y < height)
|
| 231 |
+
)
|
| 232 |
+
|
| 233 |
+
pixel_index = np.full(coords.shape[0], -1, dtype=np.int64)
|
| 234 |
+
pixel_index[pixel_ok] = pixel_y[pixel_ok] * width + pixel_x[pixel_ok]
|
| 235 |
+
|
| 236 |
+
return screen_x, screen_y, depth, pixel_index
|
| 237 |
+
|
| 238 |
+
|
| 239 |
+
def select_ids_for_3d_lasso(
    *,
    ids: list[str],
    labels: list[str | None],
    coords: np.ndarray,
    geometry: str,
    polygon: np.ndarray,
    view: OrbitViewState3D,
    viewport_width: int,
    viewport_height: int,
    label_filter: str | None,
) -> list[str]:
    """Resolve a screen-space lasso polygon to the ids of visible 3D points.

    Points are projected with the same orbit-camera math as the client
    renderer, occluded points are discarded via a server-side per-pixel
    depth buffer, and the survivors are tested against the lasso polygon.

    Args:
        ids: Sample id per point, parallel to ``coords``.
        labels: Optional label per point, parallel to ``ids``.
        coords: Array of shape (n, 3) with world-space point coordinates.
        geometry: When ``"spherical"``, points are normalized onto the unit
            sphere before projection.
        polygon: Array of shape (k, 2) with lasso vertices in screen pixels.
        view: Orbit camera state used to rebuild the MVP matrix.
        viewport_width: Viewport width in pixels (clamped to >= 1).
        viewport_height: Viewport height in pixels (clamped to >= 1).
        label_filter: If set, only points whose label equals this value are
            eligible for selection.

    Returns:
        Ids of points that are front-most at their pixel and inside the lasso.
    """
    width = max(1, int(viewport_width))
    height = max(1, int(viewport_height))

    if label_filter is not None:
        # Restrict ids/coords to the filtered label before any projection so
        # the depth buffer only considers eligible points.
        label_mask = np.fromiter(
            (label == label_filter for label in labels),
            dtype=bool,
            count=len(labels),
        )
        if not np.any(label_mask):
            return []
        kept_indices = np.flatnonzero(label_mask)
        ids = [ids[int(i)] for i in kept_indices]
        coords = coords[label_mask]

    if geometry == "spherical":
        # Project points onto the unit sphere (norm clamped to avoid /0).
        norms = np.linalg.norm(coords, axis=1, keepdims=True)
        coords = coords / np.maximum(norms, 1e-8)

    mvp = _build_mvp_for_orbit(view, coords, width, height)
    screen_x, screen_y, depth, pixel_index = _project_points_3d_to_screen(
        mvp, coords, width, height
    )

    # Poor-man's z-buffer: keep the minimum depth seen at each pixel.
    pixel_count = max(1, width * height)
    depth_buffer = np.full(pixel_count, np.inf, dtype=np.float32)

    valid_mask = pixel_index >= 0
    valid_pixels = pixel_index[valid_mask]
    valid_depth = depth[valid_mask]
    # minimum.at handles repeated pixel indices correctly (unbuffered).
    np.minimum.at(depth_buffer, valid_pixels, valid_depth)

    # A point is "visible" if it is (nearly) the front-most at its pixel;
    # the 1e-4 tolerance keeps coincident points selectable together.
    visible_mask = np.zeros(coords.shape[0], dtype=bool)
    visible_mask[valid_mask] = depth[valid_mask] <= depth_buffer[valid_pixels] + 1e-4

    # Cheap bounding-box rejection before the exact polygon test.
    x_min = float(np.min(polygon[:, 0]))
    x_max = float(np.max(polygon[:, 0]))
    y_min = float(np.min(polygon[:, 1]))
    y_max = float(np.max(polygon[:, 1]))

    candidate_mask = (
        visible_mask
        & (screen_x >= x_min)
        & (screen_x <= x_max)
        & (screen_y >= y_min)
        & (screen_y <= y_max)
    )

    candidate_indices = np.flatnonzero(candidate_mask)
    if candidate_indices.size == 0:
        return []

    candidate_points = np.column_stack(
        (screen_x[candidate_indices], screen_y[candidate_indices])
    ).astype(np.float32)
    inside = points_in_polygon(candidate_points, polygon)
    selected_indices = candidate_indices[np.flatnonzero(inside)]

    return [ids[int(i)] for i in selected_indices]
|
vendor/hyperview/embeddings/__init__.py
ADDED
|
@@ -0,0 +1,31 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Embedding computation and projection."""
|
| 2 |
+
|
| 3 |
+
from hyperview.embeddings.compute import EmbeddingComputer
|
| 4 |
+
from hyperview.embeddings.engine import (
|
| 5 |
+
EmbeddingSpec,
|
| 6 |
+
get_engine,
|
| 7 |
+
get_provider_info,
|
| 8 |
+
list_embedding_providers,
|
| 9 |
+
)
|
| 10 |
+
|
| 11 |
+
# Register HyperView providers into LanceDB registry.
|
| 12 |
+
import hyperview.embeddings.providers.lancedb_providers as _lancedb_providers # noqa: F401
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
def __getattr__(name: str):
    """Lazy import for heavy dependencies (UMAP/numba)."""
    if name != "ProjectionEngine":
        raise AttributeError(f"module {__name__!r} has no attribute {name!r}")

    from hyperview.embeddings.projection import ProjectionEngine

    return ProjectionEngine
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
# Public API of hyperview.embeddings.  ProjectionEngine is not imported
# eagerly; it is resolved on first access by the module-level __getattr__
# to keep UMAP/numba off the default import path.
__all__ = [
    "EmbeddingComputer",
    "EmbeddingSpec",
    "ProjectionEngine",
    # Provider utilities
    "get_engine",
    "get_provider_info",
    "list_embedding_providers",
]
|
vendor/hyperview/embeddings/__pycache__/__init__.cpython-312.pyc
ADDED
|
Binary file (1.14 kB). View file
|
|
|
vendor/hyperview/embeddings/__pycache__/compute.cpython-312.pyc
ADDED
|
Binary file (5.13 kB). View file
|
|
|
vendor/hyperview/embeddings/__pycache__/engine.cpython-312.pyc
ADDED
|
Binary file (15.8 kB). View file
|
|
|
vendor/hyperview/embeddings/compute.py
ADDED
|
@@ -0,0 +1,89 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Image embedding computation via EmbedAnything."""
|
| 2 |
+
|
| 3 |
+
import os
|
| 4 |
+
import tempfile
|
| 5 |
+
from pathlib import Path
|
| 6 |
+
|
| 7 |
+
import numpy as np
|
| 8 |
+
from embed_anything import EmbeddingModel
|
| 9 |
+
from PIL import Image
|
| 10 |
+
|
| 11 |
+
from hyperview.core.sample import Sample
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
class EmbeddingComputer:
    """Compute embeddings for image samples using EmbedAnything."""

    def __init__(self, model: str):
        """Initialize the embedding computer.

        Args:
            model: HuggingFace model ID to load via EmbedAnything.

        Raises:
            ValueError: If ``model`` is empty or whitespace-only.
        """
        if not model or not model.strip():
            raise ValueError("model must be a non-empty HuggingFace model_id")

        self.model_id = model
        # Loaded lazily by _get_model() so construction stays cheap.
        self._model: EmbeddingModel | None = None

    def _get_model(self) -> EmbeddingModel:
        """Lazily initialize the EmbedAnything model."""
        if self._model is None:
            self._model = EmbeddingModel.from_pretrained_hf(model_id=self.model_id)
        return self._model

    def _load_rgb_image(self, sample: Sample) -> Image.Image:
        """Load an image and normalize it to RGB.

        For file-backed samples, returns an in-memory copy and closes the file
        handle immediately to avoid leaking descriptors during batch processing.
        """
        with sample.load_image() as img:
            # Force pixel data into memory before the file handle closes.
            img.load()
            if img.mode != "RGB":
                # convert() returns a new, detached image object.
                return img.convert("RGB")
            return img.copy()

    def _embed_file(self, file_path: str) -> np.ndarray:
        """Embed a single image file and return a float32 vector.

        Raises:
            RuntimeError: If EmbedAnything returns zero or multiple embeddings
                for one image file.
        """
        model = self._get_model()
        result = model.embed_file(file_path)

        if not result:
            raise RuntimeError(f"EmbedAnything returned no embeddings for: {file_path}")
        if len(result) != 1:
            raise RuntimeError(
                f"Expected 1 embedding for an image file, got {len(result)}: {file_path}"
            )

        return np.asarray(result[0].embedding, dtype=np.float32)

    def _embed_pil_image(self, image: Image.Image) -> np.ndarray:
        """Embed an in-memory PIL image.

        The embedding path here is file-based (``embed_file``), so the image
        is round-tripped through a temporary PNG that is always removed.
        """
        temp_fd, temp_path = tempfile.mkstemp(suffix=".png")
        os.close(temp_fd)
        try:
            image.save(temp_path, format="PNG")
            return self._embed_file(temp_path)
        finally:
            Path(temp_path).unlink(missing_ok=True)

    def compute_single(self, sample: Sample) -> np.ndarray:
        """Compute embedding for a single sample."""
        image = self._load_rgb_image(sample)
        return self._embed_pil_image(image)

    def compute_batch(
        self,
        samples: list[Sample],
        batch_size: int = 32,
        show_progress: bool = True,
    ) -> list[np.ndarray]:
        """Compute embeddings for a list of samples.

        Note: ``batch_size`` is validated but samples are embedded one at a
        time below; the parameter is kept for interface compatibility.
        """
        if batch_size <= 0:
            raise ValueError("batch_size must be > 0")
        # Trigger model download/initialization up front so the first sample's
        # timing is not skewed by model loading.
        self._get_model()

        if show_progress:
            print(f"Computing embeddings for {len(samples)} samples...")

        return [self.compute_single(sample) for sample in samples]
|
| 89 |
+
|
vendor/hyperview/embeddings/engine.py
ADDED
|
@@ -0,0 +1,391 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Embedding spec + engine built on LanceDB's embedding registry."""
|
| 2 |
+
|
| 3 |
+
from __future__ import annotations
|
| 4 |
+
|
| 5 |
+
import hashlib
|
| 6 |
+
import json
|
| 7 |
+
import math
|
| 8 |
+
import time
|
| 9 |
+
from dataclasses import dataclass, field
|
| 10 |
+
from typing import Any, Literal
|
| 11 |
+
|
| 12 |
+
import numpy as np
|
| 13 |
+
|
| 14 |
+
# Register HyperView providers into LanceDB registry.
|
| 15 |
+
import hyperview.embeddings.providers.lancedb_providers as _lancedb_providers # noqa: F401
|
| 16 |
+
|
| 17 |
+
__all__ = [
    "EmbeddingSpec",
    "EmbeddingEngine",
    "get_engine",
    "list_embedding_providers",
    "get_provider_info",
]

# Providers whose embeddings are treated as hyperboloid-geometry output;
# consulted as a fallback by EmbeddingSpec.geometry and get_provider_info.
HYPERBOLIC_PROVIDERS = frozenset({"hyper-models"})
|
| 26 |
+
|
| 27 |
+
|
| 28 |
+
def _format_elapsed(seconds: float) -> str:
|
| 29 |
+
if seconds < 60:
|
| 30 |
+
return f"{seconds:.1f}s"
|
| 31 |
+
total_seconds = int(round(seconds))
|
| 32 |
+
minutes, secs = divmod(total_seconds, 60)
|
| 33 |
+
if minutes < 60:
|
| 34 |
+
return f"{minutes}m {secs:02d}s"
|
| 35 |
+
hours, minutes = divmod(minutes, 60)
|
| 36 |
+
return f"{hours}h {minutes:02d}m {secs:02d}s"
|
| 37 |
+
|
| 38 |
+
|
| 39 |
+
def _format_eta(seconds: float) -> str:
|
| 40 |
+
if not math.isfinite(seconds) or seconds < 0:
|
| 41 |
+
return "unknown"
|
| 42 |
+
return _format_elapsed(seconds)
|
| 43 |
+
|
| 44 |
+
|
| 45 |
+
@dataclass
class EmbeddingSpec:
    """Specification for an embedding model.

    All providers live in the LanceDB registry. HyperView's custom providers
    (embed-anything, hyper-models) are registered on import.

    Attributes:
        provider: Provider identifier (e.g., 'embed-anything', 'hyper-models', 'open-clip')
        model_id: Model identifier (HuggingFace model_id, checkpoint name, etc.)
        checkpoint: Optional checkpoint path/URL for weight-only models
        provider_kwargs: Additional kwargs passed to the embedding function
        modality: What input type this embedder handles
    """

    provider: str
    model_id: str | None = None
    checkpoint: str | None = None
    provider_kwargs: dict[str, Any] = field(default_factory=dict)
    modality: Literal["image", "text", "multimodal"] = "image"

    @property
    def geometry(self) -> Literal["euclidean", "hyperboloid"]:
        """Get the output geometry for this spec."""
        if self.provider == "hyper-models":
            # Ask hyper_models for the concrete model's geometry when we know
            # its name; otherwise default this provider to hyperboloid.
            chosen = self.model_id or self.provider_kwargs.get("name")
            if chosen is None:
                return "hyperboloid"
            import hyper_models

            reported = str(hyper_models.get_model_info(str(chosen)).geometry)
            if reported in ("hyperboloid", "poincare"):
                return "hyperboloid"
            return "euclidean"

        return "hyperboloid" if self.provider in HYPERBOLIC_PROVIDERS else "euclidean"

    def to_dict(self) -> dict[str, Any]:
        """Convert to JSON-serializable dict for persistence."""
        payload: dict[str, Any] = {
            "provider": self.provider,
            "modality": self.modality,
            "geometry": self.geometry,
        }
        # Optional fields are emitted only when truthy so round-tripping via
        # from_dict() reconstructs the same spec.
        for key, value in (
            ("model_id", self.model_id),
            ("checkpoint", self.checkpoint),
            ("provider_kwargs", self.provider_kwargs),
        ):
            if value:
                payload[key] = value
        return payload

    @classmethod
    def from_dict(cls, d: dict[str, Any]) -> EmbeddingSpec:
        """Create from dict (e.g., loaded from JSON)."""
        return cls(
            provider=d["provider"],
            model_id=d.get("model_id"),
            checkpoint=d.get("checkpoint"),
            provider_kwargs=d.get("provider_kwargs", {}),
            modality=d.get("modality", "image"),
        )

    def content_hash(self) -> str:
        """Generate a short hash of the spec for collision-resistant keys."""
        canonical = json.dumps(self.to_dict(), sort_keys=True)
        return hashlib.sha256(canonical.encode()).hexdigest()[:12]

    def make_space_key(self) -> str:
        """Generate a collision-resistant space_key from this spec.

        Format: {provider}__{slugified_model_id}__{content_hash}
        """
        from hyperview.storage.schema import slugify_model_id

        base = self.model_id or self.checkpoint or "default"
        return f"{self.provider}__{slugify_model_id(base)}__{self.content_hash()}"
|
| 125 |
+
|
| 126 |
+
|
| 127 |
+
class EmbeddingEngine:
    """Embedding engine using LanceDB registry.

    All providers are accessed through the LanceDB embedding registry.
    HyperView providers are registered automatically on import.
    """

    def __init__(self) -> None:
        # Memoize instantiated embedding functions by spec content hash so a
        # model is loaded at most once per process.
        self._cache: dict[str, Any] = {}  # spec_hash -> embedding function

    def get_function(self, spec: EmbeddingSpec) -> Any:
        """Get an embedding function from LanceDB registry.

        Args:
            spec: Embedding specification.

        Returns:
            LanceDB EmbeddingFunction instance.

        Raises:
            ValueError: If provider not found in registry.
            ImportError: If the provider's optional dependencies are missing.
        """
        cache_key = spec.content_hash()
        if cache_key in self._cache:
            return self._cache[cache_key]

        from lancedb.embeddings import get_registry

        registry = get_registry()

        # Get provider factory from registry
        try:
            factory = registry.get(spec.provider)
        except KeyError:
            available = list_embedding_providers()
            raise ValueError(
                f"Unknown provider: '{spec.provider}'. "
                f"Available: {', '.join(sorted(available))}"
            ) from None

        # Explicit spec fields first; provider_kwargs last so callers can
        # override either of them.
        create_kwargs: dict[str, Any] = {}
        if spec.model_id:
            create_kwargs["name"] = spec.model_id

        if spec.checkpoint:
            create_kwargs["checkpoint"] = spec.checkpoint

        create_kwargs.update(spec.provider_kwargs)

        try:
            func = factory.create(**create_kwargs)
        except ImportError as e:
            raise ImportError(
                f"Provider '{spec.provider}' requires additional dependencies. "
                "Install the provider's extra dependencies and try again."
            ) from e

        self._cache[cache_key] = func
        return func

    def embed_images(
        self,
        samples: list[Any],
        spec: EmbeddingSpec,
        batch_size: int = 32,
        show_progress: bool = True,
    ) -> np.ndarray:
        """Compute embeddings for image samples.

        Args:
            samples: List of Sample objects with image filepaths.
            spec: Embedding specification.
            batch_size: Batch size for processing.
            show_progress: Whether to show progress.

        Returns:
            Array of shape (N, D) where N is len(samples) and D is embedding dim.
        """
        provider_target = spec.model_id or spec.checkpoint or spec.provider
        if show_progress:
            print(
                f"Preparing embedding provider '{spec.provider}' ({provider_target})...",
                flush=True,
            )

        func = self.get_function(spec)

        # Some providers expose their own progress toggle; mirror ours.
        if hasattr(func, "set_progress_enabled"):
            func.set_progress_enabled(show_progress)

        if show_progress:
            print(f"Computing embeddings for {len(samples)} samples...", flush=True)

        all_embeddings: list[np.ndarray] = []
        total_samples = len(samples)
        total_batches = max(1, math.ceil(total_samples / batch_size))
        # Cap output at roughly 20 progress lines for long runs.
        report_every_batches = 1 if total_batches <= 20 else max(1, total_batches // 20)
        started_at = time.perf_counter()
        last_report_at = started_at

        for batch_index, i in enumerate(range(0, len(samples), batch_size), start=1):
            batch_samples = samples[i:i + batch_size]

            batch_paths = [s.filepath for s in batch_samples]
            batch_embeddings = func.compute_source_embeddings(batch_paths)
            all_embeddings.extend(batch_embeddings)

            if not show_progress:
                continue

            completed = len(all_embeddings)
            now = time.perf_counter()
            # Always report the first and last batch, every Nth batch, and at
            # least every 10 seconds for slow providers.
            should_report = batch_index == 1 or batch_index == total_batches
            if batch_index % report_every_batches == 0:
                should_report = True
            if now - last_report_at >= 10.0:
                should_report = True
            if not should_report:
                continue

            elapsed = max(now - started_at, 1e-9)
            rate = completed / elapsed
            remaining = total_samples - completed
            eta_seconds = remaining / rate if rate > 0 else float("inf")
            print(
                f"Embedded {completed}/{total_samples} samples "
                f"({completed / total_samples:.0%}, batch {batch_index}/{total_batches}, "
                f"{rate:.1f}/s, elapsed {_format_elapsed(elapsed)}, "
                f"ETA {_format_eta(eta_seconds)})",
                flush=True,
            )
            last_report_at = now

        return np.array(all_embeddings, dtype=np.float32)

    def embed_texts(
        self,
        texts: list[str],
        spec: EmbeddingSpec,
    ) -> np.ndarray:
        """Compute embeddings for text inputs.

        Args:
            texts: List of text strings.
            spec: Embedding specification.

        Returns:
            Array of shape (N, D).

        Raises:
            RuntimeError: If the provider returns no embedding for a query.
        """
        func = self.get_function(spec)

        # Prefer the provider's batch text API when available.
        if hasattr(func, "generate_embeddings"):
            out = func.generate_embeddings(texts)
            return np.asarray(out, dtype=np.float32)

        # Fallback: embed one query at a time via the query interface.
        embeddings: list[np.ndarray] = []
        for text in texts:
            out = func.compute_query_embeddings(text)
            if not out:
                raise RuntimeError(f"Provider '{spec.provider}' returned no embedding for query")
            embeddings.append(np.asarray(out[0], dtype=np.float32))
        return np.vstack(embeddings)

    def get_space_config(self, spec: EmbeddingSpec, dim: int) -> dict[str, Any]:
        """Get space configuration for storage.

        Args:
            spec: Embedding specification.
            dim: Embedding dimension.

        Returns:
            Config dict for SpaceInfo.config_json.
        """
        func = self.get_function(spec)

        config = spec.to_dict()
        config["dim"] = dim

        # Let the instantiated function override geometry/curvature when it
        # exposes them (more authoritative than the spec-level fallback).
        if hasattr(func, "geometry"):
            config["geometry"] = func.geometry
        if hasattr(func, "curvature") and func.curvature is not None:
            config["curvature"] = func.curvature

        # Hyperboloid (Lorentz-model) vectors carry one extra ambient
        # coordinate, so the spatial dimensionality is dim - 1.
        if config.get("geometry") == "hyperboloid":
            config["spatial_dim"] = dim - 1

        return config
|
| 314 |
+
|
| 315 |
+
|
| 316 |
+
# Process-wide singleton, created lazily by get_engine().
_ENGINE: EmbeddingEngine | None = None


def get_engine() -> EmbeddingEngine:
    """Get the global embedding engine singleton."""
    global _ENGINE
    engine = _ENGINE
    if engine is None:
        engine = EmbeddingEngine()
        _ENGINE = engine
    return engine
|
| 325 |
+
|
| 326 |
+
|
| 327 |
+
def list_embedding_providers(available_only: bool = False) -> list[str]:
    """List all registered embedding providers.

    Args:
        available_only: If True, only return providers whose dependencies are installed.

    Returns:
        List of provider identifiers.
    """
    from lancedb.embeddings import get_registry

    registry = get_registry()

    # NOTE(review): relies on the registry's private `_functions` mapping —
    # there is no public enumeration API; verify on LanceDB upgrades.
    providers = list(getattr(registry, "_functions", {}).keys())

    if not available_only:
        return sorted(providers)

    usable: list[str] = []
    for name in providers:
        try:
            registry.get(name).create()
        except ImportError:
            # Missing optional dependency -> provider unusable.
            continue
        except (TypeError, ValueError):
            # Factory needs arguments, but its dependencies are importable.
            pass
        usable.append(name)

    return sorted(usable)
|
| 357 |
+
|
| 358 |
+
|
| 359 |
+
def get_provider_info(provider: str) -> dict[str, Any]:
    """Get information about an embedding provider.

    Args:
        provider: Provider identifier.

    Returns:
        Dict with provider info.

    Raises:
        ValueError: If the provider is not registered.
    """
    from lancedb.embeddings import get_registry

    try:
        factory = get_registry().get(provider)
    except KeyError:
        raise ValueError(f"Unknown provider: {provider}") from None

    is_hyperview = provider in ("embed-anything", "hyper-models")
    info: dict[str, Any] = {
        "provider": provider,
        "source": "hyperview" if is_hyperview else "lancedb",
        "geometry": "hyperboloid" if provider in HYPERBOLIC_PROVIDERS else "euclidean",
    }

    # Probe whether the provider's optional dependencies are installed:
    # create() failing with ImportError means "not installed", while
    # TypeError/ValueError means the factory just needs arguments.
    try:
        factory.create()
    except ImportError:
        info["installed"] = False
    except (TypeError, ValueError):
        info["installed"] = True
    else:
        info["installed"] = True

    return info
|
vendor/hyperview/embeddings/pipelines.py
ADDED
|
@@ -0,0 +1,265 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Compute orchestration pipelines for HyperView.
|
| 2 |
+
|
| 3 |
+
These functions coordinate embedding computation and layout/projection
|
| 4 |
+
computation, persisting results into the configured storage backend.
|
| 5 |
+
"""
|
| 6 |
+
|
| 7 |
+
from __future__ import annotations
|
| 8 |
+
|
| 9 |
+
import time
|
| 10 |
+
from typing import Any
|
| 11 |
+
|
| 12 |
+
import numpy as np
|
| 13 |
+
|
| 14 |
+
from hyperview.storage.backend import StorageBackend
|
| 15 |
+
from hyperview.storage.schema import make_layout_key, normalize_layout_dimension
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
def _format_elapsed(seconds: float) -> str:
|
| 19 |
+
if seconds < 60:
|
| 20 |
+
return f"{seconds:.1f}s"
|
| 21 |
+
total_seconds = int(round(seconds))
|
| 22 |
+
minutes, secs = divmod(total_seconds, 60)
|
| 23 |
+
if minutes < 60:
|
| 24 |
+
return f"{minutes}m {secs:02d}s"
|
| 25 |
+
hours, minutes = divmod(minutes, 60)
|
| 26 |
+
return f"{hours}h {minutes:02d}m {secs:02d}s"
|
| 27 |
+
|
| 28 |
+
|
| 29 |
+
def compute_embeddings(
    storage: StorageBackend,
    spec: Any,
    batch_size: int = 32,
    sample_ids: list[str] | None = None,
    show_progress: bool = True,
) -> tuple[str, int, int]:
    """Compute embeddings for samples that don't have them yet.

    Args:
        storage: Storage backend to read samples from and write embeddings to.
        spec: Embedding specification (provider, model_id, etc.)
        batch_size: Batch size for processing.
        sample_ids: Optional subset of sample IDs to ensure embeddings for.
            If omitted, embeddings are ensured for the full dataset.
        show_progress: Whether to show progress bar.

    Returns:
        Tuple of (space_key, num_computed, num_skipped).

    Raises:
        ValueError: If no samples exist, requested sample IDs are missing,
            or the provider cannot be resolved.
    """
    from hyperview.embeddings.engine import get_engine

    engine = get_engine()

    if sample_ids is None:
        target_samples = storage.get_all_samples()
        if not target_samples:
            raise ValueError("No samples in storage")
    else:
        if not sample_ids:
            raise ValueError("sample_ids must contain at least one sample ID")

        # Deduplicate while preserving the caller's ordering.
        requested_sample_ids = list(dict.fromkeys(sample_ids))
        target_samples = storage.get_samples_by_ids(requested_sample_ids)
        found_sample_ids = {sample.id for sample in target_samples}
        missing_sample_ids = [sample_id for sample_id in requested_sample_ids if sample_id not in found_sample_ids]
        if missing_sample_ids:
            # Only show the first few missing IDs to keep the message short.
            raise ValueError(
                "Requested sample_ids were not found in storage: "
                f"{missing_sample_ids[:5]}"
            )

    # Generate space key before computing (deterministic from spec)
    space_key = spec.make_space_key()
    target_sample_ids = [sample.id for sample in target_samples]

    # Decide which samples still need embeddings:
    # - brand-new space: everything targeted
    # - full-dataset run on an existing space: delegate the diff to storage
    # - subset run: diff requested IDs against the already-embedded set
    if not storage.get_space(space_key):
        missing_ids = target_sample_ids
    elif sample_ids is None:
        missing_ids = storage.get_missing_embedding_ids(space_key)
    else:
        embedded_ids = storage.get_embedded_ids(space_key)
        missing_ids = [sample_id for sample_id in target_sample_ids if sample_id not in embedded_ids]

    num_skipped = len(target_sample_ids) - len(missing_ids)

    # Fast path: nothing to compute, report and return.
    if not missing_ids:
        if show_progress:
            scope = "requested" if sample_ids is not None else "all"
            print(
                f"All {len(target_sample_ids)} {scope} samples already have embeddings "
                f"in space '{space_key}'"
            )
        return space_key, 0, num_skipped

    samples_to_embed = storage.get_samples_by_ids(missing_ids)

    if show_progress and num_skipped > 0:
        print(f"Skipped {num_skipped} samples with existing embeddings")

    # Heavy step: runs the actual model over the missing samples.
    embeddings = engine.embed_images(
        samples=samples_to_embed,
        spec=spec,
        batch_size=batch_size,
        show_progress=show_progress,
    )

    # Register the space only now: the embedding dimension is first known
    # after at least one batch has been computed.
    dim = embeddings.shape[1]
    config = engine.get_space_config(spec, dim)
    storage.ensure_space(
        model_id=spec.model_id or spec.provider,
        dim=dim,
        config=config,
        space_key=space_key,
    )

    ids = [sample.id for sample in samples_to_embed]
    storage.add_embeddings(space_key, ids, embeddings)

    return space_key, len(ids), num_skipped
|
| 123 |
+
|
| 124 |
+
|
| 125 |
+
def compute_layout(
    storage: StorageBackend,
    space_key: str | None = None,
    method: str = "umap",
    geometry: str = "euclidean",
    layout_dimension: int = 2,
    n_neighbors: int = 15,
    min_dist: float = 0.1,
    metric: str = "cosine",
    force: bool = False,
    show_progress: bool = True,
) -> str:
    """Compute layout/projection for visualization.

    Args:
        storage: Storage backend with embeddings.
        space_key: Embedding space to project. If None, uses the first available.
        method: Projection method ('umap' supported).
        geometry: Output geometry type ('euclidean', 'poincare', or 'spherical').
        layout_dimension: Visualization dimension (2D or 3D).
        n_neighbors: Number of neighbors for UMAP.
        min_dist: Minimum distance for UMAP.
        metric: Distance metric for UMAP.
        force: Force recomputation even if layout exists.
        show_progress: Whether to print progress messages.

    Returns:
        layout_key for the computed layout.

    Raises:
        ValueError: If no embedding spaces, space not found, or insufficient samples.
    """
    from hyperview.embeddings.projection import ProjectionEngine

    # Validate arguments before touching storage.
    if method != "umap":
        raise ValueError(f"Invalid method: {method}. Only 'umap' is supported.")
    layout_dimension = normalize_layout_dimension(layout_dimension)

    if geometry not in ("euclidean", "poincare", "spherical"):
        raise ValueError(
            f"Invalid geometry: {geometry}. Must be 'euclidean', 'poincare', or 'spherical'."
        )
    if geometry == "poincare" and layout_dimension != 2:
        raise ValueError("Poincare layouts currently require 2D output.")

    if space_key is None:
        spaces = storage.list_spaces()
        if not spaces:
            raise ValueError("No embedding spaces. Call compute_embeddings() first.")

        # Choose a sensible default space based on the requested output geometry.
        # - For Poincaré output, prefer a hyperbolic (hyperboloid) embedding space if present.
        # - For non-hyperbolic output, prefer a Euclidean embedding space if present.
        if geometry == "poincare":
            preferred = next((s for s in spaces if s.geometry == "hyperboloid"), None)
        else:
            preferred = next((s for s in spaces if s.geometry != "hyperboloid"), None)

        space_key = preferred.space_key if preferred is not None else spaces[0].space_key

    space = storage.get_space(space_key)
    if space is None:
        raise ValueError(f"Space not found: {space_key}")

    input_geometry = space.geometry
    # Curvature is optional space config; None falls back to the engine default.
    curvature = (space.config or {}).get("curvature")

    ids, vectors = storage.get_embeddings(space_key)
    if len(ids) == 0:
        raise ValueError(f"No embeddings in space '{space_key}'. Call compute_embeddings() first.")

    if len(ids) < 3:
        raise ValueError(f"Need at least 3 samples for visualization, have {len(ids)}")

    layout_params = {
        "n_neighbors": n_neighbors,
        "min_dist": min_dist,
        "metric": metric,
    }

    # Spherical output cares about directions, not magnitudes, so the inputs
    # are L2-normalized in that case only.
    normalize_input = geometry == "spherical"

    layout_key = make_layout_key(
        space_key,
        method,
        geometry,
        layout_dimension=layout_dimension,
        params=layout_params,
    )

    # Reuse an existing layout only if it covers exactly the same sample set.
    if not force:
        existing_layout = storage.get_layout(layout_key)
        if existing_layout is not None:
            existing_ids, _ = storage.get_layout_coords(layout_key)
            if set(existing_ids) == set(ids):
                if show_progress:
                    print(f"Layout '{layout_key}' already exists with {len(ids)} points")
                return layout_key
            if show_progress:
                print("Layout exists but has different samples, recomputing...")

    if show_progress:
        print(
            f"Computing {geometry} {method} layout "
            f"({layout_dimension}D) for {len(ids)} samples..."
        )

    # NOTE(review): the layout record is registered before projection runs;
    # if projection raises, the record exists without coordinates — confirm
    # downstream readers tolerate that.
    storage.ensure_layout(
        layout_key=layout_key,
        space_key=space_key,
        method=method,
        geometry=geometry,
        params=layout_params,
    )

    engine = ProjectionEngine()
    started_at = time.perf_counter()
    coords = engine.project(
        vectors,
        input_geometry=input_geometry,
        output_geometry=geometry,
        n_components=layout_dimension,
        normalize_input=normalize_input,
        curvature=curvature,
        method=method,
        n_neighbors=n_neighbors,
        min_dist=min_dist,
        metric=metric,
        verbose=show_progress,
    )

    if show_progress:
        print(
            f"Computed {geometry} {method} layout in "
            f"{_format_elapsed(time.perf_counter() - started_at)}",
            flush=True,
        )

    storage.add_layout_coords(layout_key, ids, coords)

    return layout_key
|
vendor/hyperview/embeddings/projection.py
ADDED
|
@@ -0,0 +1,292 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Projection methods for dimensionality reduction."""
|
| 2 |
+
|
| 3 |
+
import logging
|
| 4 |
+
import warnings
|
| 5 |
+
|
| 6 |
+
import numpy as np
|
| 7 |
+
import umap
|
| 8 |
+
|
| 9 |
+
logger = logging.getLogger(__name__)
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
class ProjectionEngine:
    """Engine for projecting high-dimensional embeddings to low-dimensional layouts."""

    def l2_normalize_rows(self, embeddings: np.ndarray) -> np.ndarray:
        """L2-normalize embeddings row-wise with numerical safeguards."""
        norms = np.linalg.norm(embeddings, axis=1, keepdims=True)
        # Guard against division by zero for all-zero rows.
        norms = np.maximum(norms, 1e-12)
        return (embeddings / norms).astype(np.float32)

    def to_poincare_ball(
        self,
        hyperboloid_embeddings: np.ndarray,
        curvature: float | None = None,
        clamp_radius: float = 0.999999,
    ) -> np.ndarray:
        """Convert hyperboloid (Lorentz) coordinates to Poincaré ball coordinates.

        Input is expected to be shape (N, D+1) with first coordinate being time-like.
        Points are assumed to satisfy: t^2 - ||x||^2 = 1/c (c > 0).

        Returns Poincaré ball coordinates of shape (N, D) in the unit ball.

        Notes:
            - Many hyperbolic libraries parameterize curvature as a positive number c
              where the manifold has sectional curvature -c.
            - We map to the unit ball for downstream distance metrics (UMAP 'poincare').
        """
        if hyperboloid_embeddings.ndim != 2 or hyperboloid_embeddings.shape[1] < 2:
            raise ValueError(
                "hyperboloid_embeddings must have shape (N, D+1) with D>=1"
            )

        # Default to unit curvature when the space config does not specify one.
        c = float(curvature) if curvature is not None else 1.0
        if c <= 0:
            raise ValueError(f"curvature must be > 0, got {c}")

        # Radius R = 1/sqrt(c) for curvature -c
        R = 1.0 / np.sqrt(c)

        t = hyperboloid_embeddings[:, :1]
        x = hyperboloid_embeddings[:, 1:]

        # Map to ball radius R: u_R = x / (t + R)
        denom = t + R
        u_R = x / denom

        # Rescale to unit ball: u = u_R / R = sqrt(c) * u_R
        u = u_R / R

        # Numerical guard: ensure inside the unit ball
        radii = np.linalg.norm(u, axis=1)
        mask = radii >= clamp_radius
        if np.any(mask):
            u[mask] = u[mask] / radii[mask][:, np.newaxis] * clamp_radius

        return u.astype(np.float32)

    def project(
        self,
        embeddings: np.ndarray,
        *,
        input_geometry: str = "euclidean",
        output_geometry: str = "euclidean",
        n_components: int = 2,
        normalize_input: bool = False,
        curvature: float | None = None,
        method: str = "umap",
        n_neighbors: int = 15,
        min_dist: float = 0.1,
        metric: str = "cosine",
        random_state: int = 42,
        verbose: bool = False,
    ) -> np.ndarray:
        """Project embeddings with geometry-aware preprocessing.

        This separates two concerns:
        1) Geometry/model transforms for the *input* embeddings (e.g. hyperboloid -> Poincaré)
        2) Dimensionality reduction / layout (currently UMAP)

        Args:
            embeddings: Input embeddings (N x D) or hyperboloid (N x D+1).
            input_geometry: Geometry/model of the input embeddings (euclidean, hyperboloid).
            output_geometry: Geometry of the output coordinates (euclidean, poincare, spherical).
            n_components: Number of output dimensions.
            normalize_input: Whether to L2-normalize vectors before projection.
            curvature: Curvature parameter for hyperbolic embeddings (positive c).
            method: Layout method (currently only 'umap').
            n_neighbors: UMAP neighbors.
            min_dist: UMAP min_dist.
            metric: Input metric (used for euclidean inputs).
            random_state: Random seed.

        Returns:
            Layout coordinates (N x n_components).
        """
        if method != "umap":
            raise ValueError(f"Invalid method: {method}. Only 'umap' is supported.")
        if n_components < 2:
            raise ValueError(f"n_components must be >= 2, got {n_components}")

        prepared = embeddings
        prepared_metric: str = metric

        if normalize_input:
            prepared = self.l2_normalize_rows(prepared)

        if input_geometry == "hyperboloid":
            # Convert to unit Poincaré ball and use UMAP's built-in hyperbolic distance.
            # NOTE(review): this intentionally starts from the raw `embeddings`,
            # so any normalize_input result is discarded on this path — confirm
            # that combination is never expected to take effect together.
            prepared = self.to_poincare_ball(embeddings, curvature=curvature)
            prepared_metric = "poincare"

        if output_geometry == "poincare":
            if n_components != 2:
                raise ValueError("Poincare layouts currently require 2D output")
            return self.project_to_poincare(
                prepared,
                n_neighbors=n_neighbors,
                min_dist=min_dist,
                metric=prepared_metric,
                random_state=random_state,
                verbose=verbose,
            )

        if output_geometry in ("euclidean", "spherical"):
            return self.project_umap(
                prepared,
                n_neighbors=n_neighbors,
                min_dist=min_dist,
                metric=prepared_metric,
                n_components=n_components,
                random_state=random_state,
                verbose=verbose,
            )

        raise ValueError(
            f"Invalid output_geometry: {output_geometry}. "
            "Must be 'euclidean', 'poincare', or 'spherical'."
        )

    def project_umap(
        self,
        embeddings: np.ndarray,
        n_neighbors: int = 15,
        min_dist: float = 0.1,
        metric: str = "cosine",
        n_components: int = 2,
        random_state: int = 42,
        verbose: bool = False,
    ) -> np.ndarray:
        """Project embeddings to Euclidean layout coordinates using UMAP."""
        # UMAP requires n_neighbors < number of samples; keep at least 2.
        n_neighbors = min(n_neighbors, len(embeddings) - 1)
        if n_neighbors < 2:
            n_neighbors = 2

        # A fixed seed forces single-threaded UMAP for reproducibility.
        n_jobs = 1 if random_state is not None else -1

        reducer = umap.UMAP(
            n_neighbors=n_neighbors,
            min_dist=min_dist,
            n_components=n_components,
            metric=metric,
            random_state=random_state,
            n_jobs=n_jobs,
            verbose=verbose,
        )

        coords = reducer.fit_transform(embeddings)
        # Rescale into [-1, 1] so the frontend can render without knowing scale.
        coords = self._normalize_coords(coords)

        return coords

    def project_to_poincare(
        self,
        embeddings: np.ndarray,
        n_neighbors: int = 15,
        min_dist: float = 0.1,
        metric: str = "cosine",
        random_state: int = 42,
        verbose: bool = False,
    ) -> np.ndarray:
        """Project embeddings to the Poincaré disk using UMAP with hyperboloid output."""
        n_neighbors = min(n_neighbors, len(embeddings) - 1)
        if n_neighbors < 2:
            n_neighbors = 2

        n_jobs = 1 if random_state is not None else -1

        # Suppress warning about missing gradient for poincare metric (only affects inverse_transform)
        with warnings.catch_warnings():
            warnings.filterwarnings("ignore", message="gradient function is not yet implemented")
            reducer = umap.UMAP(
                n_neighbors=n_neighbors,
                min_dist=min_dist,
                n_components=2,
                metric=metric,
                output_metric="hyperboloid",
                random_state=random_state,
                n_jobs=n_jobs,
                verbose=verbose,
            )
            spatial_coords = reducer.fit_transform(embeddings)

        # Recover the time-like coordinate: t = sqrt(1 + ||x||^2) on the unit hyperboloid.
        squared_norm = np.sum(spatial_coords**2, axis=1)
        t = np.sqrt(1 + squared_norm)

        # Project to Poincaré disk: u = x / (1 + t)
        denom = 1 + t
        poincare_coords = spatial_coords / denom[:, np.newaxis]

        # Clamp to unit disk for numerical stability
        radii = np.linalg.norm(poincare_coords, axis=1)
        max_radius = 0.999
        mask = radii > max_radius
        if np.any(mask):
            logger.warning(f"Clamping {np.sum(mask)} points to unit disk.")
            poincare_coords[mask] = (
                poincare_coords[mask] / radii[mask][:, np.newaxis] * max_radius
            )

        # Recenter then pull points toward the origin for nicer display.
        poincare_coords = self._center_poincare(poincare_coords)
        poincare_coords = self._scale_poincare(poincare_coords, factor=0.65)

        return poincare_coords

    def _scale_poincare(self, coords: np.ndarray, factor: float) -> np.ndarray:
        """Scale points towards the origin in hyperbolic space.

        Scales hyperbolic distance from origin by `factor`. If factor < 1, points move closer to center.
        """
        radii = np.linalg.norm(coords, axis=1)
        # Skip near-origin points to avoid dividing by ~0.
        mask = radii > 1e-6

        r = radii[mask]
        # arctanh diverges at 1; clamp just inside the disk boundary.
        r = np.minimum(r, 0.9999999)
        r_new = np.tanh(factor * np.arctanh(r))

        scale_ratios = np.ones_like(radii)
        scale_ratios[mask] = r_new / r

        return coords * scale_ratios[:, np.newaxis]

    def _center_poincare(self, coords: np.ndarray) -> np.ndarray:
        """Center points in the Poincaré disk using a Möbius transformation."""
        if len(coords) == 0:
            return coords

        # Treat 2D coordinates as complex numbers for the Möbius map.
        z = coords[:, 0] + 1j * coords[:, 1]
        centroid = np.mean(z)

        # Skip when the centroid is essentially at the origin (nothing to do)
        # or near the boundary (transform would be numerically unstable).
        if np.abs(centroid) > 0.99 or np.abs(centroid) < 1e-6:
            return coords

        # Möbius transformation: w = (z - a) / (1 - conj(a) * z)
        a = centroid
        w = (z - a) / (1 - np.conj(a) * z)

        return np.stack([w.real, w.imag], axis=1)

    def _normalize_coords(self, coords: np.ndarray) -> np.ndarray:
        """Normalize coordinates to [-1, 1] range."""
        if len(coords) == 0:
            return coords

        coords = coords - coords.mean(axis=0)
        max_abs = np.abs(coords).max()
        if max_abs > 0:
            # 0.95 leaves a small margin so points don't sit on the frame edge.
            coords = coords / max_abs * 0.95

        return coords

    def poincare_distance(self, u: np.ndarray, v: np.ndarray) -> float:
        """Compute the Poincaré distance between two points."""
        u_norm_sq = np.sum(u**2)
        v_norm_sq = np.sum(v**2)
        diff_norm_sq = np.sum((u - v) ** 2)

        # Clamp norms below 1 so the denominator stays positive.
        u_norm_sq = min(u_norm_sq, 0.99999)
        v_norm_sq = min(v_norm_sq, 0.99999)

        delta = 2 * diff_norm_sq / ((1 - u_norm_sq) * (1 - v_norm_sq))
        return np.arccosh(1 + delta)
|
vendor/hyperview/embeddings/providers/__init__.py
ADDED
|
@@ -0,0 +1,7 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Embedding providers.
|
| 2 |
+
|
| 3 |
+
HyperView integrates with LanceDB's embedding registry.
|
| 4 |
+
Custom providers are registered in `lancedb_providers.py`.
|
| 5 |
+
"""
|
| 6 |
+
|
| 7 |
+
__all__: list[str] = []
|
vendor/hyperview/embeddings/providers/__pycache__/__init__.cpython-312.pyc
ADDED
|
Binary file (478 Bytes). View file
|
|
|
vendor/hyperview/embeddings/providers/__pycache__/lancedb_providers.cpython-312.pyc
ADDED
|
Binary file (18.9 kB). View file
|
|
|
vendor/hyperview/embeddings/providers/lancedb_providers.py
ADDED
|
@@ -0,0 +1,374 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""LanceDB-registered embedding providers for HyperView.
|
| 2 |
+
|
| 3 |
+
This module registers HyperView's embedding providers into the LanceDB embedding
|
| 4 |
+
registry using the @register decorator.
|
| 5 |
+
|
| 6 |
+
Providers:
|
| 7 |
+
- embed-anything: CLIP-based image embeddings (torch-free, default)
|
| 8 |
+
- hyper-models: Non-Euclidean model zoo via `hyper-models` (torch-free ONNX; downloads from HF Hub)
|
| 9 |
+
- timm-image: Image backbones loaded with timm (e.g. MegaDescriptor)
|
| 10 |
+
"""
|
| 11 |
+
|
| 12 |
+
from __future__ import annotations
|
| 13 |
+
|
| 14 |
+
from typing import Any
|
| 15 |
+
|
| 16 |
+
import numpy as np
|
| 17 |
+
from lancedb.embeddings import EmbeddingFunction, register
|
| 18 |
+
from pydantic import PrivateAttr
|
| 19 |
+
|
| 20 |
+
__all__ = [
|
| 21 |
+
"EmbedAnythingEmbeddings",
|
| 22 |
+
"HyperModelsEmbeddings",
|
| 23 |
+
"TimmImageEmbeddings",
|
| 24 |
+
]
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
@register("embed-anything")
class EmbedAnythingEmbeddings(EmbeddingFunction):
    """CLIP-based image embeddings via embed-anything.

    This is the default provider for HyperView - lightweight and torch-free.

    Args:
        name: HuggingFace model ID for CLIP (default: openai/clip-vit-base-patch32)
        batch_size: Batch size for processing
    """

    name: str = "openai/clip-vit-base-patch32"
    batch_size: int = 32

    # Lazily-initialized runtime state, kept out of the pydantic schema.
    _computer: Any = PrivateAttr(default=None)
    _ndims: int | None = PrivateAttr(default=None)

    def __init__(self, **kwargs: Any) -> None:
        super().__init__(**kwargs)
        self._computer = None
        self._ndims = None

    def _get_computer(self) -> Any:
        """Build the underlying EmbeddingComputer on first use and cache it."""
        if self._computer is None:
            from hyperview.embeddings.compute import EmbeddingComputer

            self._computer = EmbeddingComputer(model=self.name)
        return self._computer

    def ndims(self) -> int:
        """Return the embedding dimensionality inferred from the model name."""
        if self._ndims is None:
            # 'large' CLIP variants produce 768-d vectors; all other names
            # (including base CLIP) fall back to 512-d.
            self._ndims = 768 if "large" in self.name.lower() else 512
        return self._ndims

    def compute_source_embeddings(
        self, inputs: Any, *args: Any, **kwargs: Any
    ) -> list[np.ndarray | None]:
        """Embed a batch of inputs (Sample objects or file-path strings)."""
        from hyperview.core.sample import Sample

        computer = self._get_computer()

        samples: list[Any] = []
        for inp in self.sanitize_input(inputs):
            if isinstance(inp, Sample):
                samples.append(inp)
            elif isinstance(inp, str):
                # Bare strings are treated as file paths.
                samples.append(Sample(id=inp, filepath=inp))
            else:
                raise TypeError(f"Unsupported input type: {type(inp)}")

        vectors = computer.compute_batch(samples, batch_size=self.batch_size, show_progress=False)
        return list(vectors)

    def compute_query_embeddings(
        self, query: Any, *args: Any, **kwargs: Any
    ) -> list[np.ndarray | None]:
        """Embed a single query by delegating to the batch path."""
        return self.compute_source_embeddings([query], *args, **kwargs)
|
| 89 |
+
|
| 90 |
+
|
| 91 |
+
@register("hyper-models")
|
| 92 |
+
class HyperModelsEmbeddings(EmbeddingFunction):
|
| 93 |
+
"""Non-Euclidean embeddings via the `hyper-models` package.
|
| 94 |
+
|
| 95 |
+
This provider is a thin wrapper around `hyper_models.load(...)`.
|
| 96 |
+
Models are downloaded from the Hugging Face Hub on first use.
|
| 97 |
+
|
| 98 |
+
Args:
|
| 99 |
+
name: Model name in the hyper-models registry (e.g. 'hycoclip-vit-s').
|
| 100 |
+
checkpoint: Optional local path to an ONNX file (skips hub download).
|
| 101 |
+
batch_size: Batch size hint. Current HyCoCLIP/MERU ONNX exports may only
|
| 102 |
+
support batch_size=1; HyperView encodes one image at a time for
|
| 103 |
+
maximum compatibility.
|
| 104 |
+
"""
|
| 105 |
+
|
| 106 |
+
name: str = "hycoclip-vit-s"
|
| 107 |
+
checkpoint: str | None = None
|
| 108 |
+
batch_size: int = 1
|
| 109 |
+
|
| 110 |
+
_model: Any = PrivateAttr(default=None)
|
| 111 |
+
_model_info: Any = PrivateAttr(default=None)
|
| 112 |
+
|
| 113 |
+
def __init__(self, **kwargs: Any) -> None:
|
| 114 |
+
super().__init__(**kwargs)
|
| 115 |
+
self._model = None
|
| 116 |
+
self._model_info = None
|
| 117 |
+
|
| 118 |
+
def _ensure_model_info(self) -> None:
|
| 119 |
+
if self._model_info is not None:
|
| 120 |
+
return
|
| 121 |
+
|
| 122 |
+
try:
|
| 123 |
+
import hyper_models
|
| 124 |
+
except ImportError as e:
|
| 125 |
+
raise ImportError(
|
| 126 |
+
"Provider 'hyper-models' requires the 'hyper-models' package. "
|
| 127 |
+
"Install it with: `uv pip install hyper-models`"
|
| 128 |
+
) from e
|
| 129 |
+
|
| 130 |
+
try:
|
| 131 |
+
self._model_info = hyper_models.get_model_info(self.name)
|
| 132 |
+
except KeyError:
|
| 133 |
+
available = ", ".join(sorted(hyper_models.list_models()))
|
| 134 |
+
raise ValueError(
|
| 135 |
+
f"Unknown hyper-models model: '{self.name}'. Available: {available}"
|
| 136 |
+
) from None
|
| 137 |
+
|
| 138 |
+
def _ensure_model(self) -> None:
|
| 139 |
+
if self._model is not None:
|
| 140 |
+
return
|
| 141 |
+
|
| 142 |
+
self._ensure_model_info()
|
| 143 |
+
import hyper_models
|
| 144 |
+
|
| 145 |
+
self._model = hyper_models.load(self.name, local_path=self.checkpoint)
|
| 146 |
+
|
| 147 |
+
def ndims(self) -> int:
|
| 148 |
+
self._ensure_model_info()
|
| 149 |
+
assert self._model_info is not None
|
| 150 |
+
return int(getattr(self._model_info, "dim"))
|
| 151 |
+
|
| 152 |
+
@property
|
| 153 |
+
def geometry(self) -> str:
|
| 154 |
+
self._ensure_model_info()
|
| 155 |
+
assert self._model_info is not None
|
| 156 |
+
return str(getattr(self._model_info, "geometry"))
|
| 157 |
+
|
| 158 |
+
def compute_source_embeddings(
|
| 159 |
+
self, inputs: Any, *args: Any, **kwargs: Any
|
| 160 |
+
) -> list[np.ndarray | None]:
|
| 161 |
+
from hyperview.core.sample import Sample
|
| 162 |
+
|
| 163 |
+
self._ensure_model()
|
| 164 |
+
assert self._model is not None
|
| 165 |
+
|
| 166 |
+
inputs = self.sanitize_input(inputs)
|
| 167 |
+
all_embeddings: list[np.ndarray | None] = []
|
| 168 |
+
|
| 169 |
+
from PIL import Image
|
| 170 |
+
|
| 171 |
+
for inp in inputs:
|
| 172 |
+
if isinstance(inp, Sample):
|
| 173 |
+
with inp.load_image() as img:
|
| 174 |
+
img.load()
|
| 175 |
+
if img.mode != "RGB":
|
| 176 |
+
img = img.convert("RGB")
|
| 177 |
+
pil_img = img.copy()
|
| 178 |
+
elif isinstance(inp, str):
|
| 179 |
+
with Image.open(inp) as img:
|
| 180 |
+
img.load()
|
| 181 |
+
if img.mode != "RGB":
|
| 182 |
+
img = img.convert("RGB")
|
| 183 |
+
pil_img = img.copy()
|
| 184 |
+
elif isinstance(inp, Image.Image):
|
| 185 |
+
pil_img = inp.convert("RGB") if inp.mode != "RGB" else inp
|
| 186 |
+
else:
|
| 187 |
+
raise TypeError(f"Unsupported input type: {type(inp)}")
|
| 188 |
+
|
| 189 |
+
emb = self._model.encode_images([pil_img])
|
| 190 |
+
vec = np.asarray(emb[0], dtype=np.float32)
|
| 191 |
+
all_embeddings.append(vec)
|
| 192 |
+
|
| 193 |
+
return all_embeddings
|
| 194 |
+
|
| 195 |
+
def compute_query_embeddings(
|
| 196 |
+
self, query: Any, *args: Any, **kwargs: Any
|
| 197 |
+
) -> list[np.ndarray | None]:
|
| 198 |
+
return self.compute_source_embeddings([query], *args, **kwargs)
|
| 199 |
+
|
| 200 |
+
|
| 201 |
+
@register("timm-image")
|
| 202 |
+
class TimmImageEmbeddings(EmbeddingFunction):
|
| 203 |
+
"""Image embeddings via timm backbones.
|
| 204 |
+
|
| 205 |
+
This provider supports timm models, including Hugging Face-hosted timm
|
| 206 |
+
checkpoints like ``hf-hub:BVRA/MegaDescriptor-L-384``.
|
| 207 |
+
|
| 208 |
+
Args:
|
| 209 |
+
name: timm model name (local timm id or ``hf-hub:<repo>``).
|
| 210 |
+
batch_size: Batch size for image encoding.
|
| 211 |
+
device: Explicit torch device (e.g. ``cpu``, ``cuda``). If omitted,
|
| 212 |
+
selects ``cuda`` when available, otherwise ``cpu``.
|
| 213 |
+
"""
|
| 214 |
+
|
| 215 |
+
name: str = "hf-hub:BVRA/MegaDescriptor-L-384"
|
| 216 |
+
batch_size: int = 8
|
| 217 |
+
device: str | None = None
|
| 218 |
+
|
| 219 |
+
_model: Any = PrivateAttr(default=None)
|
| 220 |
+
_transform: Any = PrivateAttr(default=None)
|
| 221 |
+
_torch: Any = PrivateAttr(default=None)
|
| 222 |
+
_device: str | None = PrivateAttr(default=None)
|
| 223 |
+
_ndims: int | None = PrivateAttr(default=None)
|
| 224 |
+
_show_progress: bool = PrivateAttr(default=False)
|
| 225 |
+
|
| 226 |
+
def __init__(self, **kwargs: Any) -> None:
|
| 227 |
+
super().__init__(**kwargs)
|
| 228 |
+
self._model = None
|
| 229 |
+
self._transform = None
|
| 230 |
+
self._torch = None
|
| 231 |
+
self._device = None
|
| 232 |
+
self._ndims = None
|
| 233 |
+
self._show_progress = False
|
| 234 |
+
if self.batch_size <= 0:
|
| 235 |
+
raise ValueError("batch_size must be > 0")
|
| 236 |
+
|
| 237 |
+
def set_progress_enabled(self, enabled: bool) -> None:
|
| 238 |
+
self._show_progress = bool(enabled)
|
| 239 |
+
|
| 240 |
+
def _import_ml_stack(self) -> tuple[Any, Any, Any]:
|
| 241 |
+
try:
|
| 242 |
+
import timm
|
| 243 |
+
import torch
|
| 244 |
+
from torchvision import transforms as tv_transforms
|
| 245 |
+
except ImportError as e:
|
| 246 |
+
raise ImportError(
|
| 247 |
+
"Provider 'timm-image' requires torch/timm/torchvision. "
|
| 248 |
+
"Install with: `uv sync --extra ml`"
|
| 249 |
+
) from e
|
| 250 |
+
return torch, timm, tv_transforms
|
| 251 |
+
|
| 252 |
+
def _ensure_model(self) -> None:
|
| 253 |
+
if self._model is not None:
|
| 254 |
+
return
|
| 255 |
+
|
| 256 |
+
torch, timm, tv_transforms = self._import_ml_stack()
|
| 257 |
+
resolved_device = self.device or (
|
| 258 |
+
"cuda"
|
| 259 |
+
if torch.cuda.is_available()
|
| 260 |
+
else "mps"
|
| 261 |
+
if hasattr(torch.backends, "mps") and torch.backends.mps.is_available()
|
| 262 |
+
else "cpu"
|
| 263 |
+
)
|
| 264 |
+
|
| 265 |
+
if self._show_progress:
|
| 266 |
+
print(
|
| 267 |
+
f"Initializing timm model '{self.name}' on {resolved_device}...",
|
| 268 |
+
flush=True,
|
| 269 |
+
)
|
| 270 |
+
|
| 271 |
+
model = timm.create_model(self.name, num_classes=0, pretrained=True)
|
| 272 |
+
model.eval()
|
| 273 |
+
model.to(resolved_device)
|
| 274 |
+
|
| 275 |
+
cfg = getattr(model, "pretrained_cfg", {}) or {}
|
| 276 |
+
input_size = cfg.get("input_size", (3, 384, 384))
|
| 277 |
+
image_size = int(input_size[-1]) if isinstance(input_size, (tuple, list)) and input_size else 384
|
| 278 |
+
mean = cfg.get("mean", (0.5, 0.5, 0.5))
|
| 279 |
+
std = cfg.get("std", (0.5, 0.5, 0.5))
|
| 280 |
+
|
| 281 |
+
self._transform = tv_transforms.Compose(
|
| 282 |
+
[
|
| 283 |
+
tv_transforms.Resize((image_size, image_size)),
|
| 284 |
+
tv_transforms.ToTensor(),
|
| 285 |
+
tv_transforms.Normalize(mean, std),
|
| 286 |
+
]
|
| 287 |
+
)
|
| 288 |
+
|
| 289 |
+
ndims = int(getattr(model, "num_features", 0))
|
| 290 |
+
if ndims <= 0:
|
| 291 |
+
dummy = torch.zeros((1, 3, image_size, image_size), device=resolved_device)
|
| 292 |
+
with torch.inference_mode():
|
| 293 |
+
out = model(dummy)
|
| 294 |
+
if isinstance(out, (tuple, list)):
|
| 295 |
+
out = out[0]
|
| 296 |
+
ndims = int(out.shape[-1])
|
| 297 |
+
|
| 298 |
+
self._model = model
|
| 299 |
+
self._torch = torch
|
| 300 |
+
self._device = resolved_device
|
| 301 |
+
self._ndims = ndims
|
| 302 |
+
|
| 303 |
+
if self._show_progress:
|
| 304 |
+
print(
|
| 305 |
+
f"timm model ready ({self._ndims} dims on {self._device})",
|
| 306 |
+
flush=True,
|
| 307 |
+
)
|
| 308 |
+
|
| 309 |
+
def ndims(self) -> int:
|
| 310 |
+
self._ensure_model()
|
| 311 |
+
assert self._ndims is not None
|
| 312 |
+
return self._ndims
|
| 313 |
+
|
| 314 |
+
@property
|
| 315 |
+
def geometry(self) -> str:
|
| 316 |
+
return "euclidean"
|
| 317 |
+
|
| 318 |
+
def _load_pil_image(self, inp: Any) -> Any:
|
| 319 |
+
from PIL import Image
|
| 320 |
+
|
| 321 |
+
from hyperview.core.sample import Sample
|
| 322 |
+
|
| 323 |
+
if isinstance(inp, Sample):
|
| 324 |
+
with inp.load_image() as img:
|
| 325 |
+
img.load()
|
| 326 |
+
if img.mode != "RGB":
|
| 327 |
+
img = img.convert("RGB")
|
| 328 |
+
return img.copy()
|
| 329 |
+
if isinstance(inp, str):
|
| 330 |
+
with Image.open(inp) as img:
|
| 331 |
+
img.load()
|
| 332 |
+
if img.mode != "RGB":
|
| 333 |
+
img = img.convert("RGB")
|
| 334 |
+
return img.copy()
|
| 335 |
+
if isinstance(inp, Image.Image):
|
| 336 |
+
return inp.convert("RGB") if inp.mode != "RGB" else inp
|
| 337 |
+
raise TypeError(f"Unsupported input type: {type(inp)}")
|
| 338 |
+
|
| 339 |
+
def compute_source_embeddings(
|
| 340 |
+
self, inputs: Any, *args: Any, **kwargs: Any
|
| 341 |
+
) -> list[np.ndarray | None]:
|
| 342 |
+
self._ensure_model()
|
| 343 |
+
assert self._model is not None
|
| 344 |
+
assert self._transform is not None
|
| 345 |
+
assert self._torch is not None
|
| 346 |
+
assert self._device is not None
|
| 347 |
+
|
| 348 |
+
pil_images = [self._load_pil_image(inp) for inp in self.sanitize_input(inputs)]
|
| 349 |
+
embeddings: list[np.ndarray | None] = []
|
| 350 |
+
|
| 351 |
+
for start in range(0, len(pil_images), self.batch_size):
|
| 352 |
+
batch_imgs = pil_images[start:start + self.batch_size]
|
| 353 |
+
batch_tensors = [self._transform(img) for img in batch_imgs]
|
| 354 |
+
batch = self._torch.stack(batch_tensors).to(self._device)
|
| 355 |
+
|
| 356 |
+
with self._torch.inference_mode():
|
| 357 |
+
out = self._model(batch)
|
| 358 |
+
|
| 359 |
+
if isinstance(out, (tuple, list)):
|
| 360 |
+
out = out[0]
|
| 361 |
+
|
| 362 |
+
out_arr = np.asarray(out.detach().cpu().numpy(), dtype=np.float32)
|
| 363 |
+
if out_arr.ndim == 1:
|
| 364 |
+
out_arr = out_arr[None, :]
|
| 365 |
+
|
| 366 |
+
for vec in out_arr:
|
| 367 |
+
embeddings.append(np.asarray(vec, dtype=np.float32))
|
| 368 |
+
|
| 369 |
+
return embeddings
|
| 370 |
+
|
| 371 |
+
def compute_query_embeddings(
|
| 372 |
+
self, query: Any, *args: Any, **kwargs: Any
|
| 373 |
+
) -> list[np.ndarray | None]:
|
| 374 |
+
return self.compute_source_embeddings([query], *args, **kwargs)
|
vendor/hyperview/server/__init__.py
ADDED
|
@@ -0,0 +1,5 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""FastAPI server for HyperView."""
|
| 2 |
+
|
| 3 |
+
from hyperview.server.app import create_app
|
| 4 |
+
|
| 5 |
+
__all__ = ["create_app"]
|
vendor/hyperview/server/__pycache__/__init__.cpython-312.pyc
ADDED
|
Binary file (362 Bytes). View file
|
|
|
vendor/hyperview/server/__pycache__/app.cpython-312.pyc
ADDED
|
Binary file (23 kB). View file
|
|
|
vendor/hyperview/server/app.py
ADDED
|
@@ -0,0 +1,521 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""FastAPI application for HyperView."""
|
| 2 |
+
|
| 3 |
+
import os
|
| 4 |
+
from pathlib import Path
|
| 5 |
+
from typing import Any
|
| 6 |
+
|
| 7 |
+
import numpy as np
|
| 8 |
+
from fastapi import Depends, FastAPI, HTTPException, Query
|
| 9 |
+
from fastapi.middleware.cors import CORSMiddleware
|
| 10 |
+
from fastapi.responses import JSONResponse
|
| 11 |
+
from fastapi.staticfiles import StaticFiles
|
| 12 |
+
from pydantic import BaseModel
|
| 13 |
+
|
| 14 |
+
from hyperview.core.dataset import Dataset
|
| 15 |
+
from hyperview.core.selection import (
|
| 16 |
+
OrbitViewState3D,
|
| 17 |
+
points_in_polygon,
|
| 18 |
+
select_ids_for_3d_lasso,
|
| 19 |
+
)
|
| 20 |
+
from hyperview.storage.schema import parse_layout_dimension
|
| 21 |
+
|
| 22 |
+
# Global dataset reference (set by launch())
_current_dataset: Dataset | None = None
# Opaque session identifier echoed by the health endpoint so launchers can
# recognize their own server instance.
_current_session_id: str | None = None
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
class SelectionRequest(BaseModel):
    """Request model for selection sync (also reused for batch fetches)."""

    # IDs of the selected/requested samples.
    sample_ids: list[str]
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
class LassoSelectionRequest(BaseModel):
    """Request model for lasso selection queries."""

    layout_key: str  # e.g., "openai_clip-vit-base-patch32__umap"
    # Polygon vertices, interleaved: [x0, y0, x1, y1, ...]
    # - 2D layouts: data-space polygon (same coordinates as /api/embeddings)
    # - 3D layouts: screen-space polygon in CSS pixels
    polygon: list[float]
    # Required for 3D lasso requests.
    view_3d: dict[str, float] | None = None
    viewport_width: int | None = None
    viewport_height: int | None = None
    # Optional label restricting which samples are eligible for selection.
    label_filter: str | None = None
    # Pagination of the returned page of selected samples.
    offset: int = 0
    limit: int = 100
    include_thumbnails: bool = True
|
| 49 |
+
|
| 50 |
+
|
| 51 |
+
class SampleResponse(BaseModel):
    """Response model for a sample."""

    id: str
    filepath: str
    filename: str
    label: str | None
    # Base64-encoded thumbnail, when available.
    thumbnail: str | None
    metadata: dict
    # Image dimensions in pixels, when known.
    width: int | None = None
    height: int | None = None
|
| 62 |
+
|
| 63 |
+
|
| 64 |
+
class LayoutInfoResponse(BaseModel):
    """Response model for layout info."""

    # Unique key identifying the layout (space + projection method).
    layout_key: str
    # Embedding space this layout was projected from.
    space_key: str
    # Projection method name (e.g. "umap" — TODO confirm exact values).
    method: str
    geometry: str
    # Number of points in the layout.
    count: int
    # Method-specific parameters, if recorded.
    params: dict[str, Any] | None
|
| 73 |
+
|
| 74 |
+
|
| 75 |
+
class SpaceInfoResponse(BaseModel):
    """Response model for embedding space info."""

    space_key: str
    model_id: str
    # Embedding dimensionality.
    dim: int
    # Number of embedded samples.
    count: int
    # Provider name (e.g. "timm-image", "hyper-models").
    provider: str
    geometry: str
    # Provider configuration, if recorded.
    config: dict[str, Any] | None
|
| 85 |
+
|
| 86 |
+
|
| 87 |
+
class DatasetResponse(BaseModel):
    """Response model for dataset info."""

    name: str
    num_samples: int
    # Distinct label values present in the dataset.
    labels: list[str]
    # All computed embedding spaces and projected layouts.
    spaces: list[SpaceInfoResponse]
    layouts: list[LayoutInfoResponse]
|
| 95 |
+
|
| 96 |
+
|
| 97 |
+
class EmbeddingsResponse(BaseModel):
    """Response model for embeddings data (for scatter plot)."""

    layout_key: str
    geometry: str
    # Parallel arrays: ids[i] / labels[i] / coords[i] describe one point.
    ids: list[str]
    labels: list[str | None]
    # Per-point layout coordinates (2 or 3 values per point).
    coords: list[list[float]]
|
| 105 |
+
|
| 106 |
+
|
| 107 |
+
class SimilarSampleResponse(BaseModel):
    """Response model for a similar sample with distance."""

    id: str
    filepath: str
    filename: str
    label: str | None
    # Base64-encoded thumbnail; None if thumbnail generation failed.
    thumbnail: str | None
    # Distance to the query sample in the searched embedding space.
    distance: float
    metadata: dict
|
| 117 |
+
|
| 118 |
+
|
| 119 |
+
class SimilaritySearchResponse(BaseModel):
    """Response model for similarity search results."""

    # ID of the query sample.
    query_id: str
    # Number of neighbors requested.
    k: int
    # Neighbors ordered as returned by Dataset.find_similar().
    results: list[SimilarSampleResponse]
|
| 125 |
+
|
| 126 |
+
|
| 127 |
+
def create_app(dataset: Dataset | None = None, session_id: str | None = None) -> FastAPI:
|
| 128 |
+
"""Create the FastAPI application.
|
| 129 |
+
|
| 130 |
+
Args:
|
| 131 |
+
dataset: Optional dataset to serve. If None, uses global dataset.
|
| 132 |
+
|
| 133 |
+
Returns:
|
| 134 |
+
FastAPI application instance.
|
| 135 |
+
"""
|
| 136 |
+
global _current_dataset, _current_session_id
|
| 137 |
+
if dataset is not None:
|
| 138 |
+
_current_dataset = dataset
|
| 139 |
+
if session_id is not None:
|
| 140 |
+
_current_session_id = session_id
|
| 141 |
+
|
| 142 |
+
app = FastAPI(
|
| 143 |
+
title="HyperView",
|
| 144 |
+
description="Dataset visualization with hyperbolic embeddings",
|
| 145 |
+
version="0.1.0",
|
| 146 |
+
)
|
| 147 |
+
|
| 148 |
+
def get_dataset() -> Dataset:
|
| 149 |
+
"""Dependency that returns the current dataset or raises 404."""
|
| 150 |
+
if _current_dataset is None:
|
| 151 |
+
raise HTTPException(status_code=404, detail="No dataset loaded")
|
| 152 |
+
return _current_dataset
|
| 153 |
+
|
| 154 |
+
# CORS middleware for development
|
| 155 |
+
app.add_middleware(
|
| 156 |
+
CORSMiddleware,
|
| 157 |
+
allow_origins=["*"],
|
| 158 |
+
allow_credentials=True,
|
| 159 |
+
allow_methods=["*"],
|
| 160 |
+
allow_headers=["*"],
|
| 161 |
+
)
|
| 162 |
+
|
| 163 |
+
@app.get("/__hyperview__/health")
|
| 164 |
+
async def hyperview_health():
|
| 165 |
+
return {
|
| 166 |
+
"name": "hyperview",
|
| 167 |
+
"version": app.version,
|
| 168 |
+
"session_id": _current_session_id,
|
| 169 |
+
"dataset": _current_dataset.name if _current_dataset is not None else None,
|
| 170 |
+
"pid": os.getpid(),
|
| 171 |
+
}
|
| 172 |
+
|
| 173 |
+
@app.get("/api/dataset", response_model=DatasetResponse)
|
| 174 |
+
async def get_dataset_info(ds: Dataset = Depends(get_dataset)):
|
| 175 |
+
"""Get dataset metadata."""
|
| 176 |
+
spaces = ds.list_spaces()
|
| 177 |
+
space_dicts = [s.to_api_dict() for s in spaces]
|
| 178 |
+
|
| 179 |
+
layouts = ds.list_layouts()
|
| 180 |
+
layout_dicts = [layout.to_api_dict() for layout in layouts]
|
| 181 |
+
|
| 182 |
+
return DatasetResponse(
|
| 183 |
+
name=ds.name,
|
| 184 |
+
num_samples=len(ds),
|
| 185 |
+
labels=ds.labels,
|
| 186 |
+
spaces=space_dicts,
|
| 187 |
+
layouts=layout_dicts,
|
| 188 |
+
)
|
| 189 |
+
|
| 190 |
+
@app.get("/api/samples")
|
| 191 |
+
async def get_samples(
|
| 192 |
+
ds: Dataset = Depends(get_dataset),
|
| 193 |
+
offset: int = Query(0, ge=0),
|
| 194 |
+
limit: int = Query(100, ge=1, le=1000),
|
| 195 |
+
label: str | None = None,
|
| 196 |
+
):
|
| 197 |
+
"""Get paginated samples with thumbnails."""
|
| 198 |
+
samples, total = ds.get_samples_paginated(
|
| 199 |
+
offset=offset, limit=limit, label=label
|
| 200 |
+
)
|
| 201 |
+
|
| 202 |
+
return {
|
| 203 |
+
"total": total,
|
| 204 |
+
"offset": offset,
|
| 205 |
+
"limit": limit,
|
| 206 |
+
"samples": [s.to_api_dict(include_thumbnail=True) for s in samples],
|
| 207 |
+
}
|
| 208 |
+
|
| 209 |
+
@app.get("/api/samples/{sample_id}", response_model=SampleResponse)
|
| 210 |
+
async def get_sample(sample_id: str, ds: Dataset = Depends(get_dataset)):
|
| 211 |
+
"""Get a single sample by ID."""
|
| 212 |
+
try:
|
| 213 |
+
sample = ds[sample_id]
|
| 214 |
+
return SampleResponse(**sample.to_api_dict())
|
| 215 |
+
except KeyError:
|
| 216 |
+
raise HTTPException(status_code=404, detail=f"Sample not found: {sample_id}")
|
| 217 |
+
|
| 218 |
+
@app.post("/api/samples/batch")
|
| 219 |
+
async def get_samples_batch(request: SelectionRequest, ds: Dataset = Depends(get_dataset)):
|
| 220 |
+
"""Get multiple samples by their IDs."""
|
| 221 |
+
samples = ds.get_samples_by_ids(request.sample_ids)
|
| 222 |
+
return {"samples": [s.to_api_dict(include_thumbnail=True) for s in samples]}
|
| 223 |
+
|
| 224 |
+
@app.get("/api/embeddings", response_model=EmbeddingsResponse)
|
| 225 |
+
async def get_embeddings(ds: Dataset = Depends(get_dataset), layout_key: str | None = None):
|
| 226 |
+
"""Get embedding coordinates for visualization."""
|
| 227 |
+
layouts = ds.list_layouts()
|
| 228 |
+
if not layouts:
|
| 229 |
+
raise HTTPException(
|
| 230 |
+
status_code=400, detail="No layouts computed. Call compute_visualization() first."
|
| 231 |
+
)
|
| 232 |
+
|
| 233 |
+
# Find the requested layout
|
| 234 |
+
layout_info = None
|
| 235 |
+
if layout_key is None:
|
| 236 |
+
layout_info = next(
|
| 237 |
+
(layout for layout in layouts if parse_layout_dimension(layout.layout_key) == 2),
|
| 238 |
+
layouts[0],
|
| 239 |
+
)
|
| 240 |
+
layout_key = layout_info.layout_key
|
| 241 |
+
else:
|
| 242 |
+
layout_info = next((layout for layout in layouts if layout.layout_key == layout_key), None)
|
| 243 |
+
if layout_info is None:
|
| 244 |
+
raise HTTPException(status_code=404, detail=f"Layout not found: {layout_key}")
|
| 245 |
+
|
| 246 |
+
ids, labels, coords = ds.get_visualization_data(layout_key)
|
| 247 |
+
|
| 248 |
+
if not ids:
|
| 249 |
+
raise HTTPException(status_code=400, detail=f"No data in layout '{layout_key}'.")
|
| 250 |
+
|
| 251 |
+
return EmbeddingsResponse(
|
| 252 |
+
layout_key=layout_key,
|
| 253 |
+
geometry=layout_info.geometry,
|
| 254 |
+
ids=ids,
|
| 255 |
+
labels=labels,
|
| 256 |
+
coords=coords.tolist(),
|
| 257 |
+
)
|
| 258 |
+
|
| 259 |
+
@app.get("/api/spaces")
|
| 260 |
+
async def get_spaces(ds: Dataset = Depends(get_dataset)):
|
| 261 |
+
"""Get all embedding spaces."""
|
| 262 |
+
spaces = ds.list_spaces()
|
| 263 |
+
return {"spaces": [s.to_api_dict() for s in spaces]}
|
| 264 |
+
|
| 265 |
+
@app.get("/api/layouts")
|
| 266 |
+
async def get_layouts(ds: Dataset = Depends(get_dataset)):
|
| 267 |
+
"""Get all available layouts."""
|
| 268 |
+
layouts = ds.list_layouts()
|
| 269 |
+
return {"layouts": [layout.to_api_dict() for layout in layouts]}
|
| 270 |
+
|
| 271 |
+
@app.post("/api/selection")
|
| 272 |
+
async def sync_selection(request: SelectionRequest):
|
| 273 |
+
"""Sync selection state (for future use)."""
|
| 274 |
+
return {"status": "ok", "selected": request.sample_ids}
|
| 275 |
+
|
| 276 |
+
@app.post("/api/selection/lasso")
|
| 277 |
+
async def lasso_selection(request: LassoSelectionRequest, ds: Dataset = Depends(get_dataset)):
|
| 278 |
+
"""Compute a lasso selection over the current embeddings.
|
| 279 |
+
|
| 280 |
+
Returns a total selected count and a paginated page of selected samples.
|
| 281 |
+
|
| 282 |
+
Selection modes:
|
| 283 |
+
- 2D layouts: polygon in data space (same coordinates as /api/embeddings).
|
| 284 |
+
- 3D layouts: polygon in screen space with explicit camera + viewport.
|
| 285 |
+
"""
|
| 286 |
+
if request.offset < 0:
|
| 287 |
+
raise HTTPException(status_code=400, detail="offset must be >= 0")
|
| 288 |
+
if request.limit < 1 or request.limit > 2000:
|
| 289 |
+
raise HTTPException(status_code=400, detail="limit must be between 1 and 2000")
|
| 290 |
+
|
| 291 |
+
if len(request.polygon) < 6 or len(request.polygon) % 2 != 0:
|
| 292 |
+
raise HTTPException(
|
| 293 |
+
status_code=400,
|
| 294 |
+
detail="polygon must be an even-length list with at least 3 vertices",
|
| 295 |
+
)
|
| 296 |
+
|
| 297 |
+
layout_info = next(
|
| 298 |
+
(layout for layout in ds.list_layouts() if layout.layout_key == request.layout_key),
|
| 299 |
+
None,
|
| 300 |
+
)
|
| 301 |
+
if layout_info is None:
|
| 302 |
+
raise HTTPException(status_code=404, detail=f"Layout not found: {request.layout_key}")
|
| 303 |
+
layout_dimension = parse_layout_dimension(layout_info.layout_key)
|
| 304 |
+
|
| 305 |
+
poly = np.asarray(request.polygon, dtype=np.float32).reshape((-1, 2))
|
| 306 |
+
if not np.all(np.isfinite(poly)):
|
| 307 |
+
raise HTTPException(status_code=400, detail="polygon must contain only finite numbers")
|
| 308 |
+
|
| 309 |
+
selected_ids: list[str]
|
| 310 |
+
|
| 311 |
+
if layout_dimension == 2:
|
| 312 |
+
# Tight AABB prefilter in data space.
|
| 313 |
+
x_min = float(np.min(poly[:, 0]))
|
| 314 |
+
x_max = float(np.max(poly[:, 0]))
|
| 315 |
+
y_min = float(np.min(poly[:, 1]))
|
| 316 |
+
y_max = float(np.max(poly[:, 1]))
|
| 317 |
+
|
| 318 |
+
candidate_ids, candidate_coords = ds.get_lasso_candidates_aabb(
|
| 319 |
+
layout_key=request.layout_key,
|
| 320 |
+
x_min=x_min,
|
| 321 |
+
x_max=x_max,
|
| 322 |
+
y_min=y_min,
|
| 323 |
+
y_max=y_max,
|
| 324 |
+
label_filter=request.label_filter,
|
| 325 |
+
)
|
| 326 |
+
|
| 327 |
+
if candidate_coords.size == 0:
|
| 328 |
+
return {
|
| 329 |
+
"total": 0,
|
| 330 |
+
"offset": request.offset,
|
| 331 |
+
"limit": request.limit,
|
| 332 |
+
"sample_ids": [],
|
| 333 |
+
"samples": [],
|
| 334 |
+
}
|
| 335 |
+
|
| 336 |
+
inside_mask = points_in_polygon(candidate_coords, poly)
|
| 337 |
+
if not np.any(inside_mask):
|
| 338 |
+
return {
|
| 339 |
+
"total": 0,
|
| 340 |
+
"offset": request.offset,
|
| 341 |
+
"limit": request.limit,
|
| 342 |
+
"sample_ids": [],
|
| 343 |
+
"samples": [],
|
| 344 |
+
}
|
| 345 |
+
|
| 346 |
+
selected_ids = [candidate_ids[i] for i in np.flatnonzero(inside_mask)]
|
| 347 |
+
elif layout_dimension == 3:
|
| 348 |
+
if request.view_3d is None:
|
| 349 |
+
raise HTTPException(
|
| 350 |
+
status_code=400,
|
| 351 |
+
detail="view_3d is required for 3D lasso selection",
|
| 352 |
+
)
|
| 353 |
+
if request.viewport_width is None or request.viewport_height is None:
|
| 354 |
+
raise HTTPException(
|
| 355 |
+
status_code=400,
|
| 356 |
+
detail="viewport_width and viewport_height are required for 3D lasso selection",
|
| 357 |
+
)
|
| 358 |
+
if request.viewport_width <= 0 or request.viewport_height <= 0:
|
| 359 |
+
raise HTTPException(
|
| 360 |
+
status_code=400,
|
| 361 |
+
detail="viewport_width and viewport_height must be > 0",
|
| 362 |
+
)
|
| 363 |
+
|
| 364 |
+
try:
|
| 365 |
+
view_3d = OrbitViewState3D(**request.view_3d)
|
| 366 |
+
except Exception as exc:
|
| 367 |
+
raise HTTPException(status_code=400, detail=f"Invalid view_3d payload: {exc}")
|
| 368 |
+
|
| 369 |
+
view_vals = np.array(
|
| 370 |
+
[
|
| 371 |
+
view_3d.yaw,
|
| 372 |
+
view_3d.pitch,
|
| 373 |
+
view_3d.distance,
|
| 374 |
+
view_3d.target_x,
|
| 375 |
+
view_3d.target_y,
|
| 376 |
+
view_3d.target_z,
|
| 377 |
+
view_3d.ortho_scale,
|
| 378 |
+
],
|
| 379 |
+
dtype=np.float64,
|
| 380 |
+
)
|
| 381 |
+
if not np.all(np.isfinite(view_vals)):
|
| 382 |
+
raise HTTPException(status_code=400, detail="view_3d must contain only finite numbers")
|
| 383 |
+
if view_3d.distance <= 0 or view_3d.ortho_scale <= 0:
|
| 384 |
+
raise HTTPException(status_code=400, detail="view_3d.distance and view_3d.ortho_scale must be > 0")
|
| 385 |
+
|
| 386 |
+
ids, labels, coords = ds.get_visualization_data(request.layout_key)
|
| 387 |
+
if not ids:
|
| 388 |
+
return {
|
| 389 |
+
"total": 0,
|
| 390 |
+
"offset": request.offset,
|
| 391 |
+
"limit": request.limit,
|
| 392 |
+
"sample_ids": [],
|
| 393 |
+
"samples": [],
|
| 394 |
+
}
|
| 395 |
+
if coords.ndim != 2 or coords.shape[1] != 3:
|
| 396 |
+
raise HTTPException(
|
| 397 |
+
status_code=400,
|
| 398 |
+
detail=(
|
| 399 |
+
f"3D lasso requires a 3D layout coordinate matrix; "
|
| 400 |
+
f"got shape {coords.shape} for layout '{request.layout_key}'."
|
| 401 |
+
),
|
| 402 |
+
)
|
| 403 |
+
|
| 404 |
+
finite_mask = np.all(np.isfinite(coords), axis=1)
|
| 405 |
+
if not np.all(finite_mask):
|
| 406 |
+
finite_indices = np.flatnonzero(finite_mask)
|
| 407 |
+
if finite_indices.size == 0:
|
| 408 |
+
return {
|
| 409 |
+
"total": 0,
|
| 410 |
+
"offset": request.offset,
|
| 411 |
+
"limit": request.limit,
|
| 412 |
+
"sample_ids": [],
|
| 413 |
+
"samples": [],
|
| 414 |
+
}
|
| 415 |
+
ids = [ids[int(i)] for i in finite_indices]
|
| 416 |
+
labels = [labels[int(i)] for i in finite_indices]
|
| 417 |
+
coords = coords[finite_mask]
|
| 418 |
+
|
| 419 |
+
selected_ids = select_ids_for_3d_lasso(
|
| 420 |
+
ids=ids,
|
| 421 |
+
labels=labels,
|
| 422 |
+
coords=coords,
|
| 423 |
+
geometry=layout_info.geometry,
|
| 424 |
+
polygon=poly,
|
| 425 |
+
view=view_3d,
|
| 426 |
+
viewport_width=request.viewport_width,
|
| 427 |
+
viewport_height=request.viewport_height,
|
| 428 |
+
label_filter=request.label_filter,
|
| 429 |
+
)
|
| 430 |
+
else:
|
| 431 |
+
raise HTTPException(
|
| 432 |
+
status_code=400,
|
| 433 |
+
detail=f"Unsupported layout dimension for lasso: {layout_dimension}D",
|
| 434 |
+
)
|
| 435 |
+
|
| 436 |
+
total = len(selected_ids)
|
| 437 |
+
|
| 438 |
+
start = int(request.offset)
|
| 439 |
+
end = int(request.offset + request.limit)
|
| 440 |
+
sample_ids = selected_ids[start:end]
|
| 441 |
+
|
| 442 |
+
samples = ds.get_samples_by_ids(sample_ids)
|
| 443 |
+
sample_dicts = [s.to_api_dict(include_thumbnail=request.include_thumbnails) for s in samples]
|
| 444 |
+
|
| 445 |
+
return {
|
| 446 |
+
"total": total,
|
| 447 |
+
"offset": request.offset,
|
| 448 |
+
"limit": request.limit,
|
| 449 |
+
"sample_ids": sample_ids,
|
| 450 |
+
"samples": sample_dicts,
|
| 451 |
+
}
|
| 452 |
+
|
| 453 |
+
@app.get("/api/search/similar/{sample_id}", response_model=SimilaritySearchResponse)
async def search_similar(
    sample_id: str,
    ds: Dataset = Depends(get_dataset),
    k: int = Query(10, ge=1, le=100),
    space_key: str | None = None,
):
    """Return k nearest neighbors for a given sample."""
    # Translate dataset-level errors into HTTP semantics: bad arguments -> 400,
    # unknown sample id -> 404.
    try:
        similar = ds.find_similar(sample_id, k=k, space_key=space_key)
    except ValueError as e:
        raise HTTPException(status_code=400, detail=str(e))
    except KeyError:
        raise HTTPException(status_code=404, detail=f"Sample not found: {sample_id}")

    def _safe_thumbnail(s):
        # Thumbnail generation is best-effort: a failure for one neighbor must
        # not sink the whole search response.
        try:
            return s.get_thumbnail_base64()
        except Exception:
            return None

    results = [
        SimilarSampleResponse(
            id=sample.id,
            filepath=sample.filepath,
            filename=sample.filename,
            label=sample.label,
            thumbnail=_safe_thumbnail(sample),
            distance=distance,
            metadata=sample.metadata,
        )
        for sample, distance in similar
    ]

    return SimilaritySearchResponse(query_id=sample_id, k=k, results=results)
|
| 494 |
+
|
| 495 |
+
@app.get("/api/thumbnail/{sample_id}")
async def get_thumbnail(sample_id: str, ds: Dataset = Depends(get_dataset)):
    """Get thumbnail image for a sample."""
    # EAFP: attempt the lookup and encode; an unknown id surfaces as KeyError
    # from the dataset indexer and maps to a 404.
    try:
        encoded = ds[sample_id].get_thumbnail_base64()
        return JSONResponse({"thumbnail": encoded})
    except KeyError:
        raise HTTPException(status_code=404, detail=f"Sample not found: {sample_id}")
|
| 504 |
+
|
| 505 |
+
# Serve static frontend files
# (tail of the app factory: mounts the pre-built frontend bundle if present)
static_dir = Path(__file__).parent / "static"
if static_dir.exists():
    # html=True makes StaticFiles serve index.html for directory requests,
    # so "/" loads the single-page frontend.
    app.mount("/", StaticFiles(directory=str(static_dir), html=True), name="static")
else:
    # Fallback: serve a simple HTML page
    @app.get("/")
    async def root():
        # Minimal JSON landing response pointing callers at the API docs.
        return {"message": "HyperView API", "docs": "/docs"}

return app
|
| 516 |
+
|
| 517 |
+
|
| 518 |
+
def set_dataset(dataset: Dataset) -> None:
    """Set the global dataset for the server.

    Stores *dataset* in the module-level ``_current_dataset`` — presumably
    read back by the ``get_dataset`` dependency used in the endpoints above
    (TODO confirm; ``get_dataset`` is defined outside this view).
    """
    global _current_dataset
    _current_dataset = dataset
|
vendor/hyperview/server/static/404.html
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
<!DOCTYPE html><!--0EHglxORKBJP1WlpL6tAy--><html lang="en" class="h-full"><head><meta charSet="utf-8"/><meta name="viewport" content="width=device-width, initial-scale=1"/><link rel="stylesheet" href="/_next/static/chunks/462c5e072cd14e02.css" data-precedence="next"/><link rel="preload" as="script" fetchPriority="low" href="/_next/static/chunks/86c1fc4cf542f408.js"/><script src="/_next/static/chunks/f29dd35a99c216ea.js" async=""></script><script src="/_next/static/chunks/567993cf36cd4ab1.js" async=""></script><script src="/_next/static/chunks/turbopack-cb59e03a04a579d1.js" async=""></script><script src="/_next/static/chunks/e954ba82c0a04100.js" async=""></script><meta name="robots" content="noindex"/><meta name="next-size-adjust" content=""/><title>404: This page could not be found.</title><title>HyperView</title><meta name="description" content="Dataset visualization with hyperbolic embeddings"/><script src="/_next/static/chunks/a6dad97d9634a72d.js" noModule=""></script></head><body class="inter_c50830d4-module__9YwRda__className antialiased h-full"><div hidden=""><!--$--><!--/$--></div><div style="font-family:system-ui,"Segoe UI",Roboto,Helvetica,Arial,sans-serif,"Apple Color Emoji","Segoe UI Emoji";height:100vh;text-align:center;display:flex;flex-direction:column;align-items:center;justify-content:center"><div><style>body{color:#000;background:#fff;margin:0}.next-error-h1{border-right:1px solid rgba(0,0,0,.3)}@media (prefers-color-scheme:dark){body{color:#fff;background:#000}.next-error-h1{border-right:1px solid rgba(255,255,255,.3)}}</style><h1 class="next-error-h1" style="display:inline-block;margin:0 20px 0 0;padding:0 23px 0 0;font-size:24px;font-weight:500;vertical-align:top;line-height:49px">404</h1><div style="display:inline-block"><h2 style="font-size:14px;font-weight:400;line-height:49px;margin:0">This page could not be found.</h2></div></div></div><!--$--><!--/$--><script src="/_next/static/chunks/86c1fc4cf542f408.js" id="_R_" 
async=""></script><script>(self.__next_f=self.__next_f||[]).push([0])</script><script>self.__next_f.push([1,"1:\"$Sreact.fragment\"\n2:I[32035,[\"/_next/static/chunks/e954ba82c0a04100.js\"],\"default\"]\n3:I[91168,[\"/_next/static/chunks/e954ba82c0a04100.js\"],\"default\"]\n4:I[64381,[\"/_next/static/chunks/e954ba82c0a04100.js\"],\"OutletBoundary\"]\n5:\"$Sreact.suspense\"\n7:I[64381,[\"/_next/static/chunks/e954ba82c0a04100.js\"],\"ViewportBoundary\"]\n9:I[64381,[\"/_next/static/chunks/e954ba82c0a04100.js\"],\"MetadataBoundary\"]\nb:I[75115,[\"/_next/static/chunks/e954ba82c0a04100.js\"],\"default\"]\n:HL[\"/_next/static/chunks/462c5e072cd14e02.css\",\"style\"]\n"])</script><script>self.__next_f.push([1,"0:{\"P\":null,\"b\":\"0EHglxORKBJP1WlpL6tAy\",\"c\":[\"\",\"_not-found\",\"\"],\"q\":\"\",\"i\":false,\"f\":[[[\"\",{\"children\":[\"/_not-found\",{\"children\":[\"__PAGE__\",{}]}]},\"$undefined\",\"$undefined\",true],[[\"$\",\"$1\",\"c\",{\"children\":[[[\"$\",\"link\",\"0\",{\"rel\":\"stylesheet\",\"href\":\"/_next/static/chunks/462c5e072cd14e02.css\",\"precedence\":\"next\",\"crossOrigin\":\"$undefined\",\"nonce\":\"$undefined\"}],[\"$\",\"script\",\"script-0\",{\"src\":\"/_next/static/chunks/e954ba82c0a04100.js\",\"async\":true,\"nonce\":\"$undefined\"}]],[\"$\",\"html\",null,{\"lang\":\"en\",\"className\":\"h-full\",\"children\":[\"$\",\"body\",null,{\"className\":\"inter_c50830d4-module__9YwRda__className antialiased h-full\",\"children\":[\"$\",\"$L2\",null,{\"parallelRouterKey\":\"children\",\"error\":\"$undefined\",\"errorStyles\":\"$undefined\",\"errorScripts\":\"$undefined\",\"template\":[\"$\",\"$L3\",null,{}],\"templateStyles\":\"$undefined\",\"templateScripts\":\"$undefined\",\"notFound\":[[[\"$\",\"title\",null,{\"children\":\"404: This page could not be found.\"}],[\"$\",\"div\",null,{\"style\":{\"fontFamily\":\"system-ui,\\\"Segoe UI\\\",Roboto,Helvetica,Arial,sans-serif,\\\"Apple Color Emoji\\\",\\\"Segoe UI 
Emoji\\\"\",\"height\":\"100vh\",\"textAlign\":\"center\",\"display\":\"flex\",\"flexDirection\":\"column\",\"alignItems\":\"center\",\"justifyContent\":\"center\"},\"children\":[\"$\",\"div\",null,{\"children\":[[\"$\",\"style\",null,{\"dangerouslySetInnerHTML\":{\"__html\":\"body{color:#000;background:#fff;margin:0}.next-error-h1{border-right:1px solid rgba(0,0,0,.3)}@media (prefers-color-scheme:dark){body{color:#fff;background:#000}.next-error-h1{border-right:1px solid rgba(255,255,255,.3)}}\"}}],[\"$\",\"h1\",null,{\"className\":\"next-error-h1\",\"style\":{\"display\":\"inline-block\",\"margin\":\"0 20px 0 0\",\"padding\":\"0 23px 0 0\",\"fontSize\":24,\"fontWeight\":500,\"verticalAlign\":\"top\",\"lineHeight\":\"49px\"},\"children\":404}],[\"$\",\"div\",null,{\"style\":{\"display\":\"inline-block\"},\"children\":[\"$\",\"h2\",null,{\"style\":{\"fontSize\":14,\"fontWeight\":400,\"lineHeight\":\"49px\",\"margin\":0},\"children\":\"This page could not be found.\"}]}]]}]}]],[]],\"forbidden\":\"$undefined\",\"unauthorized\":\"$undefined\"}]}]}]]}],{\"children\":[[\"$\",\"$1\",\"c\",{\"children\":[null,[\"$\",\"$L2\",null,{\"parallelRouterKey\":\"children\",\"error\":\"$undefined\",\"errorStyles\":\"$undefined\",\"errorScripts\":\"$undefined\",\"template\":[\"$\",\"$L3\",null,{}],\"templateStyles\":\"$undefined\",\"templateScripts\":\"$undefined\",\"notFound\":\"$undefined\",\"forbidden\":\"$undefined\",\"unauthorized\":\"$undefined\"}]]}],{\"children\":[[\"$\",\"$1\",\"c\",{\"children\":[[[\"$\",\"title\",null,{\"children\":\"404: This page could not be found.\"}],[\"$\",\"div\",null,{\"style\":\"$0:f:0:1:0:props:children:1:props:children:props:children:props:notFound:0:1:props:style\",\"children\":[\"$\",\"div\",null,{\"children\":[[\"$\",\"style\",null,{\"dangerouslySetInnerHTML\":{\"__html\":\"body{color:#000;background:#fff;margin:0}.next-error-h1{border-right:1px solid rgba(0,0,0,.3)}@media 
(prefers-color-scheme:dark){body{color:#fff;background:#000}.next-error-h1{border-right:1px solid rgba(255,255,255,.3)}}\"}}],[\"$\",\"h1\",null,{\"className\":\"next-error-h1\",\"style\":\"$0:f:0:1:0:props:children:1:props:children:props:children:props:notFound:0:1:props:children:props:children:1:props:style\",\"children\":404}],[\"$\",\"div\",null,{\"style\":\"$0:f:0:1:0:props:children:1:props:children:props:children:props:notFound:0:1:props:children:props:children:2:props:style\",\"children\":[\"$\",\"h2\",null,{\"style\":\"$0:f:0:1:0:props:children:1:props:children:props:children:props:notFound:0:1:props:children:props:children:2:props:children:props:style\",\"children\":\"This page could not be found.\"}]}]]}]}]],null,[\"$\",\"$L4\",null,{\"children\":[\"$\",\"$5\",null,{\"name\":\"Next.MetadataOutlet\",\"children\":\"$@6\"}]}]]}],{},null,false,false]},null,false,false]},null,false,false],[\"$\",\"$1\",\"h\",{\"children\":[[\"$\",\"meta\",null,{\"name\":\"robots\",\"content\":\"noindex\"}],[\"$\",\"$L7\",null,{\"children\":\"$L8\"}],[\"$\",\"div\",null,{\"hidden\":true,\"children\":[\"$\",\"$L9\",null,{\"children\":[\"$\",\"$5\",null,{\"name\":\"Next.Metadata\",\"children\":\"$La\"}]}]}],[\"$\",\"meta\",null,{\"name\":\"next-size-adjust\",\"content\":\"\"}]]}],false]],\"m\":\"$undefined\",\"G\":[\"$b\",\"$undefined\"],\"S\":true}\n"])</script><script>self.__next_f.push([1,"8:[[\"$\",\"meta\",\"0\",{\"charSet\":\"utf-8\"}],[\"$\",\"meta\",\"1\",{\"name\":\"viewport\",\"content\":\"width=device-width, initial-scale=1\"}]]\n"])</script><script>self.__next_f.push([1,"6:null\na:[[\"$\",\"title\",\"0\",{\"children\":\"HyperView\"}],[\"$\",\"meta\",\"1\",{\"name\":\"description\",\"content\":\"Dataset visualization with hyperbolic embeddings\"}]]\n"])</script></body></html>
|
vendor/hyperview/server/static/404/index.html
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
<!DOCTYPE html><!--0EHglxORKBJP1WlpL6tAy--><html lang="en" class="h-full"><head><meta charSet="utf-8"/><meta name="viewport" content="width=device-width, initial-scale=1"/><link rel="stylesheet" href="/_next/static/chunks/462c5e072cd14e02.css" data-precedence="next"/><link rel="preload" as="script" fetchPriority="low" href="/_next/static/chunks/86c1fc4cf542f408.js"/><script src="/_next/static/chunks/f29dd35a99c216ea.js" async=""></script><script src="/_next/static/chunks/567993cf36cd4ab1.js" async=""></script><script src="/_next/static/chunks/turbopack-cb59e03a04a579d1.js" async=""></script><script src="/_next/static/chunks/e954ba82c0a04100.js" async=""></script><meta name="robots" content="noindex"/><meta name="next-size-adjust" content=""/><title>404: This page could not be found.</title><title>HyperView</title><meta name="description" content="Dataset visualization with hyperbolic embeddings"/><script src="/_next/static/chunks/a6dad97d9634a72d.js" noModule=""></script></head><body class="inter_c50830d4-module__9YwRda__className antialiased h-full"><div hidden=""><!--$--><!--/$--></div><div style="font-family:system-ui,"Segoe UI",Roboto,Helvetica,Arial,sans-serif,"Apple Color Emoji","Segoe UI Emoji";height:100vh;text-align:center;display:flex;flex-direction:column;align-items:center;justify-content:center"><div><style>body{color:#000;background:#fff;margin:0}.next-error-h1{border-right:1px solid rgba(0,0,0,.3)}@media (prefers-color-scheme:dark){body{color:#fff;background:#000}.next-error-h1{border-right:1px solid rgba(255,255,255,.3)}}</style><h1 class="next-error-h1" style="display:inline-block;margin:0 20px 0 0;padding:0 23px 0 0;font-size:24px;font-weight:500;vertical-align:top;line-height:49px">404</h1><div style="display:inline-block"><h2 style="font-size:14px;font-weight:400;line-height:49px;margin:0">This page could not be found.</h2></div></div></div><!--$--><!--/$--><script src="/_next/static/chunks/86c1fc4cf542f408.js" id="_R_" 
async=""></script><script>(self.__next_f=self.__next_f||[]).push([0])</script><script>self.__next_f.push([1,"1:\"$Sreact.fragment\"\n2:I[32035,[\"/_next/static/chunks/e954ba82c0a04100.js\"],\"default\"]\n3:I[91168,[\"/_next/static/chunks/e954ba82c0a04100.js\"],\"default\"]\n4:I[64381,[\"/_next/static/chunks/e954ba82c0a04100.js\"],\"OutletBoundary\"]\n5:\"$Sreact.suspense\"\n7:I[64381,[\"/_next/static/chunks/e954ba82c0a04100.js\"],\"ViewportBoundary\"]\n9:I[64381,[\"/_next/static/chunks/e954ba82c0a04100.js\"],\"MetadataBoundary\"]\nb:I[75115,[\"/_next/static/chunks/e954ba82c0a04100.js\"],\"default\"]\n:HL[\"/_next/static/chunks/462c5e072cd14e02.css\",\"style\"]\n"])</script><script>self.__next_f.push([1,"0:{\"P\":null,\"b\":\"0EHglxORKBJP1WlpL6tAy\",\"c\":[\"\",\"_not-found\",\"\"],\"q\":\"\",\"i\":false,\"f\":[[[\"\",{\"children\":[\"/_not-found\",{\"children\":[\"__PAGE__\",{}]}]},\"$undefined\",\"$undefined\",true],[[\"$\",\"$1\",\"c\",{\"children\":[[[\"$\",\"link\",\"0\",{\"rel\":\"stylesheet\",\"href\":\"/_next/static/chunks/462c5e072cd14e02.css\",\"precedence\":\"next\",\"crossOrigin\":\"$undefined\",\"nonce\":\"$undefined\"}],[\"$\",\"script\",\"script-0\",{\"src\":\"/_next/static/chunks/e954ba82c0a04100.js\",\"async\":true,\"nonce\":\"$undefined\"}]],[\"$\",\"html\",null,{\"lang\":\"en\",\"className\":\"h-full\",\"children\":[\"$\",\"body\",null,{\"className\":\"inter_c50830d4-module__9YwRda__className antialiased h-full\",\"children\":[\"$\",\"$L2\",null,{\"parallelRouterKey\":\"children\",\"error\":\"$undefined\",\"errorStyles\":\"$undefined\",\"errorScripts\":\"$undefined\",\"template\":[\"$\",\"$L3\",null,{}],\"templateStyles\":\"$undefined\",\"templateScripts\":\"$undefined\",\"notFound\":[[[\"$\",\"title\",null,{\"children\":\"404: This page could not be found.\"}],[\"$\",\"div\",null,{\"style\":{\"fontFamily\":\"system-ui,\\\"Segoe UI\\\",Roboto,Helvetica,Arial,sans-serif,\\\"Apple Color Emoji\\\",\\\"Segoe UI 
Emoji\\\"\",\"height\":\"100vh\",\"textAlign\":\"center\",\"display\":\"flex\",\"flexDirection\":\"column\",\"alignItems\":\"center\",\"justifyContent\":\"center\"},\"children\":[\"$\",\"div\",null,{\"children\":[[\"$\",\"style\",null,{\"dangerouslySetInnerHTML\":{\"__html\":\"body{color:#000;background:#fff;margin:0}.next-error-h1{border-right:1px solid rgba(0,0,0,.3)}@media (prefers-color-scheme:dark){body{color:#fff;background:#000}.next-error-h1{border-right:1px solid rgba(255,255,255,.3)}}\"}}],[\"$\",\"h1\",null,{\"className\":\"next-error-h1\",\"style\":{\"display\":\"inline-block\",\"margin\":\"0 20px 0 0\",\"padding\":\"0 23px 0 0\",\"fontSize\":24,\"fontWeight\":500,\"verticalAlign\":\"top\",\"lineHeight\":\"49px\"},\"children\":404}],[\"$\",\"div\",null,{\"style\":{\"display\":\"inline-block\"},\"children\":[\"$\",\"h2\",null,{\"style\":{\"fontSize\":14,\"fontWeight\":400,\"lineHeight\":\"49px\",\"margin\":0},\"children\":\"This page could not be found.\"}]}]]}]}]],[]],\"forbidden\":\"$undefined\",\"unauthorized\":\"$undefined\"}]}]}]]}],{\"children\":[[\"$\",\"$1\",\"c\",{\"children\":[null,[\"$\",\"$L2\",null,{\"parallelRouterKey\":\"children\",\"error\":\"$undefined\",\"errorStyles\":\"$undefined\",\"errorScripts\":\"$undefined\",\"template\":[\"$\",\"$L3\",null,{}],\"templateStyles\":\"$undefined\",\"templateScripts\":\"$undefined\",\"notFound\":\"$undefined\",\"forbidden\":\"$undefined\",\"unauthorized\":\"$undefined\"}]]}],{\"children\":[[\"$\",\"$1\",\"c\",{\"children\":[[[\"$\",\"title\",null,{\"children\":\"404: This page could not be found.\"}],[\"$\",\"div\",null,{\"style\":\"$0:f:0:1:0:props:children:1:props:children:props:children:props:notFound:0:1:props:style\",\"children\":[\"$\",\"div\",null,{\"children\":[[\"$\",\"style\",null,{\"dangerouslySetInnerHTML\":{\"__html\":\"body{color:#000;background:#fff;margin:0}.next-error-h1{border-right:1px solid rgba(0,0,0,.3)}@media 
(prefers-color-scheme:dark){body{color:#fff;background:#000}.next-error-h1{border-right:1px solid rgba(255,255,255,.3)}}\"}}],[\"$\",\"h1\",null,{\"className\":\"next-error-h1\",\"style\":\"$0:f:0:1:0:props:children:1:props:children:props:children:props:notFound:0:1:props:children:props:children:1:props:style\",\"children\":404}],[\"$\",\"div\",null,{\"style\":\"$0:f:0:1:0:props:children:1:props:children:props:children:props:notFound:0:1:props:children:props:children:2:props:style\",\"children\":[\"$\",\"h2\",null,{\"style\":\"$0:f:0:1:0:props:children:1:props:children:props:children:props:notFound:0:1:props:children:props:children:2:props:children:props:style\",\"children\":\"This page could not be found.\"}]}]]}]}]],null,[\"$\",\"$L4\",null,{\"children\":[\"$\",\"$5\",null,{\"name\":\"Next.MetadataOutlet\",\"children\":\"$@6\"}]}]]}],{},null,false,false]},null,false,false]},null,false,false],[\"$\",\"$1\",\"h\",{\"children\":[[\"$\",\"meta\",null,{\"name\":\"robots\",\"content\":\"noindex\"}],[\"$\",\"$L7\",null,{\"children\":\"$L8\"}],[\"$\",\"div\",null,{\"hidden\":true,\"children\":[\"$\",\"$L9\",null,{\"children\":[\"$\",\"$5\",null,{\"name\":\"Next.Metadata\",\"children\":\"$La\"}]}]}],[\"$\",\"meta\",null,{\"name\":\"next-size-adjust\",\"content\":\"\"}]]}],false]],\"m\":\"$undefined\",\"G\":[\"$b\",\"$undefined\"],\"S\":true}\n"])</script><script>self.__next_f.push([1,"8:[[\"$\",\"meta\",\"0\",{\"charSet\":\"utf-8\"}],[\"$\",\"meta\",\"1\",{\"name\":\"viewport\",\"content\":\"width=device-width, initial-scale=1\"}]]\n"])</script><script>self.__next_f.push([1,"6:null\na:[[\"$\",\"title\",\"0\",{\"children\":\"HyperView\"}],[\"$\",\"meta\",\"1\",{\"name\":\"description\",\"content\":\"Dataset visualization with hyperbolic embeddings\"}]]\n"])</script></body></html>
|
vendor/hyperview/server/static/__next.__PAGE__.txt
ADDED
|
@@ -0,0 +1,9 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
1:"$Sreact.fragment"
|
| 2 |
+
2:I[73440,["/_next/static/chunks/e954ba82c0a04100.js"],"ClientPageRoot"]
|
| 3 |
+
3:I[50912,["/_next/static/chunks/bdfca692596a10e2.js","/_next/static/chunks/8d5a2ef3447cb3ee.js"],"default"]
|
| 4 |
+
6:I[64381,["/_next/static/chunks/e954ba82c0a04100.js"],"OutletBoundary"]
|
| 5 |
+
7:"$Sreact.suspense"
|
| 6 |
+
0:{"buildId":"0EHglxORKBJP1WlpL6tAy","rsc":["$","$1","c",{"children":[["$","$L2",null,{"Component":"$3","serverProvidedParams":{"searchParams":{},"params":{},"promises":["$@4","$@5"]}}],[["$","script","script-0",{"src":"/_next/static/chunks/bdfca692596a10e2.js","async":true}],["$","script","script-1",{"src":"/_next/static/chunks/8d5a2ef3447cb3ee.js","async":true}]],["$","$L6",null,{"children":["$","$7",null,{"name":"Next.MetadataOutlet","children":"$@8"}]}]]}],"loading":null,"isPartial":false}
|
| 7 |
+
4:{}
|
| 8 |
+
5:"$0:rsc:props:children:0:props:serverProvidedParams:params"
|
| 9 |
+
8:null
|
vendor/hyperview/server/static/__next._full.txt
ADDED
|
@@ -0,0 +1,18 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
1:"$Sreact.fragment"
|
| 2 |
+
2:I[32035,["/_next/static/chunks/e954ba82c0a04100.js"],"default"]
|
| 3 |
+
3:I[91168,["/_next/static/chunks/e954ba82c0a04100.js"],"default"]
|
| 4 |
+
4:I[73440,["/_next/static/chunks/e954ba82c0a04100.js"],"ClientPageRoot"]
|
| 5 |
+
5:I[50912,["/_next/static/chunks/bdfca692596a10e2.js","/_next/static/chunks/8d5a2ef3447cb3ee.js"],"default"]
|
| 6 |
+
8:I[64381,["/_next/static/chunks/e954ba82c0a04100.js"],"OutletBoundary"]
|
| 7 |
+
9:"$Sreact.suspense"
|
| 8 |
+
b:I[64381,["/_next/static/chunks/e954ba82c0a04100.js"],"ViewportBoundary"]
|
| 9 |
+
d:I[64381,["/_next/static/chunks/e954ba82c0a04100.js"],"MetadataBoundary"]
|
| 10 |
+
f:I[75115,["/_next/static/chunks/e954ba82c0a04100.js"],"default"]
|
| 11 |
+
:HL["/_next/static/chunks/462c5e072cd14e02.css","style"]
|
| 12 |
+
:HL["/_next/static/media/83afe278b6a6bb3c-s.p.3a6ba036.woff2","font",{"crossOrigin":"","type":"font/woff2"}]
|
| 13 |
+
0:{"P":null,"b":"0EHglxORKBJP1WlpL6tAy","c":["",""],"q":"","i":false,"f":[[["",{"children":["__PAGE__",{}]},"$undefined","$undefined",true],[["$","$1","c",{"children":[[["$","link","0",{"rel":"stylesheet","href":"/_next/static/chunks/462c5e072cd14e02.css","precedence":"next","crossOrigin":"$undefined","nonce":"$undefined"}]],["$","html",null,{"lang":"en","className":"h-full","children":["$","body",null,{"className":"inter_c50830d4-module__9YwRda__className antialiased h-full","children":["$","$L2",null,{"parallelRouterKey":"children","error":"$undefined","errorStyles":"$undefined","errorScripts":"$undefined","template":["$","$L3",null,{}],"templateStyles":"$undefined","templateScripts":"$undefined","notFound":[[["$","title",null,{"children":"404: This page could not be found."}],["$","div",null,{"style":{"fontFamily":"system-ui,\"Segoe UI\",Roboto,Helvetica,Arial,sans-serif,\"Apple Color Emoji\",\"Segoe UI Emoji\"","height":"100vh","textAlign":"center","display":"flex","flexDirection":"column","alignItems":"center","justifyContent":"center"},"children":["$","div",null,{"children":[["$","style",null,{"dangerouslySetInnerHTML":{"__html":"body{color:#000;background:#fff;margin:0}.next-error-h1{border-right:1px solid rgba(0,0,0,.3)}@media (prefers-color-scheme:dark){body{color:#fff;background:#000}.next-error-h1{border-right:1px solid rgba(255,255,255,.3)}}"}}],["$","h1",null,{"className":"next-error-h1","style":{"display":"inline-block","margin":"0 20px 0 0","padding":"0 23px 0 0","fontSize":24,"fontWeight":500,"verticalAlign":"top","lineHeight":"49px"},"children":404}],["$","div",null,{"style":{"display":"inline-block"},"children":["$","h2",null,{"style":{"fontSize":14,"fontWeight":400,"lineHeight":"49px","margin":0},"children":"This page could not be 
found."}]}]]}]}]],[]],"forbidden":"$undefined","unauthorized":"$undefined"}]}]}]]}],{"children":[["$","$1","c",{"children":[["$","$L4",null,{"Component":"$5","serverProvidedParams":{"searchParams":{},"params":{},"promises":["$@6","$@7"]}}],[["$","script","script-0",{"src":"/_next/static/chunks/bdfca692596a10e2.js","async":true,"nonce":"$undefined"}],["$","script","script-1",{"src":"/_next/static/chunks/8d5a2ef3447cb3ee.js","async":true,"nonce":"$undefined"}]],["$","$L8",null,{"children":["$","$9",null,{"name":"Next.MetadataOutlet","children":"$@a"}]}]]}],{},null,false,false]},null,false,false],["$","$1","h",{"children":[null,["$","$Lb",null,{"children":"$Lc"}],["$","div",null,{"hidden":true,"children":["$","$Ld",null,{"children":["$","$9",null,{"name":"Next.Metadata","children":"$Le"}]}]}],["$","meta",null,{"name":"next-size-adjust","content":""}]]}],false]],"m":"$undefined","G":["$f",[]],"S":true}
|
| 14 |
+
6:{}
|
| 15 |
+
7:"$0:f:0:1:1:children:0:props:children:0:props:serverProvidedParams:params"
|
| 16 |
+
c:[["$","meta","0",{"charSet":"utf-8"}],["$","meta","1",{"name":"viewport","content":"width=device-width, initial-scale=1"}]]
|
| 17 |
+
a:null
|
| 18 |
+
e:[["$","title","0",{"children":"HyperView"}],["$","meta","1",{"name":"description","content":"Dataset visualization with hyperbolic embeddings"}]]
|
vendor/hyperview/server/static/__next._head.txt
ADDED
|
@@ -0,0 +1,5 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
1:"$Sreact.fragment"
|
| 2 |
+
2:I[64381,["/_next/static/chunks/e954ba82c0a04100.js"],"ViewportBoundary"]
|
| 3 |
+
3:I[64381,["/_next/static/chunks/e954ba82c0a04100.js"],"MetadataBoundary"]
|
| 4 |
+
4:"$Sreact.suspense"
|
| 5 |
+
0:{"buildId":"0EHglxORKBJP1WlpL6tAy","rsc":["$","$1","h",{"children":[null,["$","$L2",null,{"children":[["$","meta","0",{"charSet":"utf-8"}],["$","meta","1",{"name":"viewport","content":"width=device-width, initial-scale=1"}]]}],["$","div",null,{"hidden":true,"children":["$","$L3",null,{"children":["$","$4",null,{"name":"Next.Metadata","children":[["$","title","0",{"children":"HyperView"}],["$","meta","1",{"name":"description","content":"Dataset visualization with hyperbolic embeddings"}]]}]}]}],["$","meta",null,{"name":"next-size-adjust","content":""}]]}],"loading":null,"isPartial":false}
|
vendor/hyperview/server/static/__next._index.txt
ADDED
|
@@ -0,0 +1,5 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
1:"$Sreact.fragment"
|
| 2 |
+
2:I[32035,["/_next/static/chunks/e954ba82c0a04100.js"],"default"]
|
| 3 |
+
3:I[91168,["/_next/static/chunks/e954ba82c0a04100.js"],"default"]
|
| 4 |
+
:HL["/_next/static/chunks/462c5e072cd14e02.css","style"]
|
| 5 |
+
0:{"buildId":"0EHglxORKBJP1WlpL6tAy","rsc":["$","$1","c",{"children":[[["$","link","0",{"rel":"stylesheet","href":"/_next/static/chunks/462c5e072cd14e02.css","precedence":"next"}]],["$","html",null,{"lang":"en","className":"h-full","children":["$","body",null,{"className":"inter_c50830d4-module__9YwRda__className antialiased h-full","children":["$","$L2",null,{"parallelRouterKey":"children","template":["$","$L3",null,{}],"notFound":[[["$","title",null,{"children":"404: This page could not be found."}],["$","div",null,{"style":{"fontFamily":"system-ui,\"Segoe UI\",Roboto,Helvetica,Arial,sans-serif,\"Apple Color Emoji\",\"Segoe UI Emoji\"","height":"100vh","textAlign":"center","display":"flex","flexDirection":"column","alignItems":"center","justifyContent":"center"},"children":["$","div",null,{"children":[["$","style",null,{"dangerouslySetInnerHTML":{"__html":"body{color:#000;background:#fff;margin:0}.next-error-h1{border-right:1px solid rgba(0,0,0,.3)}@media (prefers-color-scheme:dark){body{color:#fff;background:#000}.next-error-h1{border-right:1px solid rgba(255,255,255,.3)}}"}}],["$","h1",null,{"className":"next-error-h1","style":{"display":"inline-block","margin":"0 20px 0 0","padding":"0 23px 0 0","fontSize":24,"fontWeight":500,"verticalAlign":"top","lineHeight":"49px"},"children":404}],["$","div",null,{"style":{"display":"inline-block"},"children":["$","h2",null,{"style":{"fontSize":14,"fontWeight":400,"lineHeight":"49px","margin":0},"children":"This page could not be found."}]}]]}]}]],[]]}]}]}]]}],"loading":null,"isPartial":false}
|
vendor/hyperview/server/static/__next._tree.txt
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
:HL["/_next/static/chunks/462c5e072cd14e02.css","style"]
|
| 2 |
+
:HL["/_next/static/media/83afe278b6a6bb3c-s.p.3a6ba036.woff2","font",{"crossOrigin":"","type":"font/woff2"}]
|
| 3 |
+
0:{"buildId":"0EHglxORKBJP1WlpL6tAy","tree":{"name":"","paramType":null,"paramKey":"","hasRuntimePrefetch":false,"slots":{"children":{"name":"__PAGE__","paramType":null,"paramKey":"__PAGE__","hasRuntimePrefetch":false,"slots":null,"isRootLayout":false}},"isRootLayout":true},"staleTime":300}
|
vendor/hyperview/server/static/_next/static/0EHglxORKBJP1WlpL6tAy/_buildManifest.js
ADDED
|
@@ -0,0 +1,15 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
self.__BUILD_MANIFEST = {
|
| 2 |
+
"__rewrites": {
|
| 3 |
+
"afterFiles": [
|
| 4 |
+
{
|
| 5 |
+
"source": "/api/:path*"
|
| 6 |
+
}
|
| 7 |
+
],
|
| 8 |
+
"beforeFiles": [],
|
| 9 |
+
"fallback": []
|
| 10 |
+
},
|
| 11 |
+
"sortedPages": [
|
| 12 |
+
"/_app",
|
| 13 |
+
"/_error"
|
| 14 |
+
]
|
| 15 |
+
};self.__BUILD_MANIFEST_CB && self.__BUILD_MANIFEST_CB()
|
vendor/hyperview/server/static/_next/static/0EHglxORKBJP1WlpL6tAy/_clientMiddlewareManifest.json
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
[]
|
vendor/hyperview/server/static/_next/static/0EHglxORKBJP1WlpL6tAy/_ssgManifest.js
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
self.__SSG_MANIFEST=new Set([]);self.__SSG_MANIFEST_CB&&self.__SSG_MANIFEST_CB()
|
vendor/hyperview/server/static/_next/static/chunks/462c5e072cd14e02.css
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
@font-face{font-family:Inter;font-style:normal;font-weight:400;font-display:swap;src:url(../media/2c55a0e60120577a-s.2a48534a.woff2)format("woff2");unicode-range:U+460-52F,U+1C80-1C8A,U+20B4,U+2DE0-2DFF,U+A640-A69F,U+FE2E-FE2F}@font-face{font-family:Inter;font-style:normal;font-weight:400;font-display:swap;src:url(../media/9c72aa0f40e4eef8-s.18a48cbc.woff2)format("woff2");unicode-range:U+301,U+400-45F,U+490-491,U+4B0-4B1,U+2116}@font-face{font-family:Inter;font-style:normal;font-weight:400;font-display:swap;src:url(../media/ad66f9afd8947f86-s.7a40eb73.woff2)format("woff2");unicode-range:U+1F??}@font-face{font-family:Inter;font-style:normal;font-weight:400;font-display:swap;src:url(../media/5476f68d60460930-s.c995e352.woff2)format("woff2");unicode-range:U+370-377,U+37A-37F,U+384-38A,U+38C,U+38E-3A1,U+3A3-3FF}@font-face{font-family:Inter;font-style:normal;font-weight:400;font-display:swap;src:url(../media/2bbe8d2671613f1f-s.76dcb0b2.woff2)format("woff2");unicode-range:U+102-103,U+110-111,U+128-129,U+168-169,U+1A0-1A1,U+1AF-1B0,U+300-301,U+303-304,U+308-309,U+323,U+329,U+1EA0-1EF9,U+20AB}@font-face{font-family:Inter;font-style:normal;font-weight:400;font-display:swap;src:url(../media/1bffadaabf893a1e-s.7cd81963.woff2)format("woff2");unicode-range:U+100-2BA,U+2BD-2C5,U+2C7-2CC,U+2CE-2D7,U+2DD-2FF,U+304,U+308,U+329,U+1D00-1DBF,U+1E00-1E9F,U+1EF2-1EFF,U+2020,U+20A0-20AB,U+20AD-20C0,U+2113,U+2C60-2C7F,U+A720-A7FF}@font-face{font-family:Inter;font-style:normal;font-weight:400;font-display:swap;src:url(../media/83afe278b6a6bb3c-s.p.3a6ba036.woff2)format("woff2");unicode-range:U+??,U+131,U+152-153,U+2BB-2BC,U+2C6,U+2DA,U+2DC,U+304,U+308,U+329,U+2000-206F,U+20AC,U+2122,U+2191,U+2193,U+2212,U+2215,U+FEFF,U+FFFD}@font-face{font-family:Inter;font-style:normal;font-weight:500;font-display:swap;src:url(../media/2c55a0e60120577a-s.2a48534a.woff2)format("woff2");unicode-range:U+460-52F,U+1C80-1C8A,U+20B4,U+2DE0-2DFF,U+A640-A69F,U+FE2E-FE2F}@font-face{font-family:Inter;font-style:norm
al;font-weight:500;font-display:swap;src:url(../media/9c72aa0f40e4eef8-s.18a48cbc.woff2)format("woff2");unicode-range:U+301,U+400-45F,U+490-491,U+4B0-4B1,U+2116}@font-face{font-family:Inter;font-style:normal;font-weight:500;font-display:swap;src:url(../media/ad66f9afd8947f86-s.7a40eb73.woff2)format("woff2");unicode-range:U+1F??}@font-face{font-family:Inter;font-style:normal;font-weight:500;font-display:swap;src:url(../media/5476f68d60460930-s.c995e352.woff2)format("woff2");unicode-range:U+370-377,U+37A-37F,U+384-38A,U+38C,U+38E-3A1,U+3A3-3FF}@font-face{font-family:Inter;font-style:normal;font-weight:500;font-display:swap;src:url(../media/2bbe8d2671613f1f-s.76dcb0b2.woff2)format("woff2");unicode-range:U+102-103,U+110-111,U+128-129,U+168-169,U+1A0-1A1,U+1AF-1B0,U+300-301,U+303-304,U+308-309,U+323,U+329,U+1EA0-1EF9,U+20AB}@font-face{font-family:Inter;font-style:normal;font-weight:500;font-display:swap;src:url(../media/1bffadaabf893a1e-s.7cd81963.woff2)format("woff2");unicode-range:U+100-2BA,U+2BD-2C5,U+2C7-2CC,U+2CE-2D7,U+2DD-2FF,U+304,U+308,U+329,U+1D00-1DBF,U+1E00-1E9F,U+1EF2-1EFF,U+2020,U+20A0-20AB,U+20AD-20C0,U+2113,U+2C60-2C7F,U+A720-A7FF}@font-face{font-family:Inter;font-style:normal;font-weight:500;font-display:swap;src:url(../media/83afe278b6a6bb3c-s.p.3a6ba036.woff2)format("woff2");unicode-range:U+??,U+131,U+152-153,U+2BB-2BC,U+2C6,U+2DA,U+2DC,U+304,U+308,U+329,U+2000-206F,U+20AC,U+2122,U+2191,U+2193,U+2212,U+2215,U+FEFF,U+FFFD}@font-face{font-family:Inter Fallback;src:local(Arial);ascent-override:90.44%;descent-override:22.52%;line-gap-override:0.0%;size-adjust:107.12%}.inter_c50830d4-module__9YwRda__className{font-family:Inter,Inter Fallback;font-style:normal}
|
| 2 |
+
.dv-scrollable{position:relative;overflow:hidden}.dv-scrollable .dv-scrollbar-horizontal{will-change:background-color,transform;backface-visibility:hidden;background-color:#0000;border-radius:2px;height:4px;transition:background-color 1s ease-in-out;position:absolute;bottom:0;left:0;transform:translate(0,0)}.dv-scrollable:hover .dv-scrollbar-horizontal,.dv-scrollable.dv-scrollable-resizing .dv-scrollbar-horizontal,.dv-scrollable.dv-scrollable-scrolling .dv-scrollbar-horizontal{background-color:var(--dv-scrollbar-background-color,#ffffff40)}.dv-svg{fill:currentColor;stroke:currentColor;stroke-width:0;line-height:1;display:inline-block}.dockview-theme-dark{--dv-paneview-active-outline-color:dodgerblue;--dv-tabs-and-actions-container-font-size:13px;--dv-tabs-and-actions-container-height:35px;--dv-drag-over-background-color:#53595d80;--dv-drag-over-border-color:transparent;--dv-tabs-container-scrollbar-color:#888;--dv-icon-hover-background-color:#5a5d5e4f;--dv-floating-box-shadow:8px 8px 8px 0px #53595d80;--dv-overlay-z-index:999;--dv-tab-font-size:inherit;--dv-border-radius:0px;--dv-tab-margin:0;--dv-sash-color:transparent;--dv-active-sash-color:transparent;--dv-active-sash-transition-duration:.1s;--dv-active-sash-transition-delay:.5s;--dv-group-view-background-color:#1e1e1e;--dv-tabs-and-actions-container-background-color:#252526;--dv-activegroup-visiblepanel-tab-background-color:#1e1e1e;--dv-activegroup-hiddenpanel-tab-background-color:#2d2d2d;--dv-inactivegroup-visiblepanel-tab-background-color:#1e1e1e;--dv-inactivegroup-hiddenpanel-tab-background-color:#2d2d2d;--dv-tab-divider-color:#1e1e1e;--dv-activegroup-visiblepanel-tab-color:white;--dv-activegroup-hiddenpanel-tab-color:#969696;--dv-inactivegroup-visiblepanel-tab-color:#8f8f8f;--dv-inactivegroup-hiddenpanel-tab-color:#626262;--dv-separator-border:#444;--dv-paneview-header-border-color:#ccc3}.dockview-theme-dark .dv-drop-target-container 
.dv-drop-target-anchor.dv-drop-target-anchor-container-changed{opacity:0;transition:none}.dockview-theme-light{--dv-paneview-active-outline-color:dodgerblue;--dv-tabs-and-actions-container-font-size:13px;--dv-tabs-and-actions-container-height:35px;--dv-drag-over-background-color:#53595d80;--dv-drag-over-border-color:transparent;--dv-tabs-container-scrollbar-color:#888;--dv-icon-hover-background-color:#5a5d5e4f;--dv-floating-box-shadow:8px 8px 8px 0px #53595d80;--dv-overlay-z-index:999;--dv-tab-font-size:inherit;--dv-border-radius:0px;--dv-tab-margin:0;--dv-sash-color:transparent;--dv-active-sash-color:transparent;--dv-active-sash-transition-duration:.1s;--dv-active-sash-transition-delay:.5s;--dv-group-view-background-color:white;--dv-tabs-and-actions-container-background-color:#f3f3f3;--dv-activegroup-visiblepanel-tab-background-color:white;--dv-activegroup-hiddenpanel-tab-background-color:#ececec;--dv-inactivegroup-visiblepanel-tab-background-color:white;--dv-inactivegroup-hiddenpanel-tab-background-color:#ececec;--dv-tab-divider-color:white;--dv-activegroup-visiblepanel-tab-color:#333;--dv-activegroup-hiddenpanel-tab-color:#333333b3;--dv-inactivegroup-visiblepanel-tab-color:#333333b3;--dv-inactivegroup-hiddenpanel-tab-color:#33333359;--dv-separator-border:#80808059;--dv-paneview-header-border-color:#333;--dv-scrollbar-background-color:#00000040}.dockview-theme-light .dv-drop-target-container .dv-drop-target-anchor.dv-drop-target-anchor-container-changed{opacity:0;transition:none}.dockview-theme-vs{--dv-paneview-active-outline-color:dodgerblue;--dv-drag-over-background-color:#53595d80;--dv-drag-over-border-color:transparent;--dv-tabs-container-scrollbar-color:#888;--dv-icon-hover-background-color:#5a5d5e4f;--dv-floating-box-shadow:8px 8px 8px 0px 
#53595d80;--dv-overlay-z-index:999;--dv-tab-font-size:inherit;--dv-border-radius:0px;--dv-tab-margin:0;--dv-sash-color:transparent;--dv-active-sash-color:transparent;--dv-active-sash-transition-duration:.1s;--dv-active-sash-transition-delay:.5s;--dv-group-view-background-color:#1e1e1e;--dv-tabs-and-actions-container-background-color:#2d2d30;--dv-activegroup-visiblepanel-tab-background-color:#007acc;--dv-activegroup-hiddenpanel-tab-background-color:#2d2d2d;--dv-inactivegroup-visiblepanel-tab-background-color:#3f3f46;--dv-inactivegroup-hiddenpanel-tab-background-color:#2d2d2d;--dv-tab-divider-color:#1e1e1e;--dv-activegroup-hiddenpanel-tab-color:white;--dv-inactivegroup-visiblepanel-tab-color:white;--dv-inactivegroup-hiddenpanel-tab-color:white;--dv-separator-border:#444;--dv-paneview-header-border-color:#ccc3;--dv-tabs-and-actions-container-height:20px;--dv-tabs-and-actions-container-font-size:11px;--dv-activegroup-visiblepanel-tab-color:white}.dockview-theme-vs .dv-drop-target-container .dv-drop-target-anchor.dv-drop-target-anchor-container-changed{opacity:0;transition:none}.dockview-theme-vs .dv-groupview.dv-active-group>.dv-tabs-and-actions-container{box-sizing:content-box;border-bottom:2px solid var(--dv-activegroup-visiblepanel-tab-background-color)}.dockview-theme-vs .dv-groupview.dv-active-group>.dv-tabs-and-actions-container .dv-tab.dv-active-tab{border-top:2px solid var(--dv-activegroup-visiblepanel-tab-background-color)}.dockview-theme-vs .dv-groupview.dv-active-group>.dv-tabs-and-actions-container .dv-tab.dv-inactive-tab{border-top:2px solid var(--dv-activegroup-hiddenpanel-tab-background-color)}.dockview-theme-vs .dv-groupview.dv-inactive-group>.dv-tabs-and-actions-container{box-sizing:content-box;border-bottom:2px solid var(--dv-inactivegroup-visiblepanel-tab-background-color)}.dockview-theme-vs .dv-groupview.dv-inactive-group>.dv-tabs-and-actions-container .dv-tab.dv-active-tab{border-top:2px solid 
var(--dv-inactivegroup-visiblepanel-tab-background-color)}.dockview-theme-vs .dv-groupview.dv-inactive-group>.dv-tabs-and-actions-container .dv-tab.dv-inactive-tab{border-top:2px solid var(--dv-inactivegroup-hiddenpanel-tab-background-color)}.dockview-theme-abyss{--dv-paneview-active-outline-color:#596f99;--dv-tabs-and-actions-container-font-size:13px;--dv-tabs-and-actions-container-height:35px;--dv-drag-over-background-color:#53595d80;--dv-drag-over-border-color:transparent;--dv-tabs-container-scrollbar-color:#888;--dv-icon-hover-background-color:#5a5d5e4f;--dv-floating-box-shadow:8px 8px 8px 0px #53595d80;--dv-overlay-z-index:999;--dv-tab-font-size:inherit;--dv-border-radius:0px;--dv-tab-margin:0;--dv-sash-color:transparent;--dv-active-sash-color:transparent;--dv-active-sash-transition-duration:.1s;--dv-active-sash-transition-delay:.5s;--dv-color-abyss-dark:#000c18;--dv-color-abyss:#10192c;--dv-color-abyss-light:#1c1c2a;--dv-color-abyss-lighter:#2b2b4a;--dv-color-abyss-accent:#5b1ecf;--dv-color-abyss-primary-text:white;--dv-color-abyss-secondary-text:#9497a9;--dv-group-view-background-color:var(--dv-color-abyss-dark);--dv-tabs-and-actions-container-background-color:var(--dv-color-abyss-light);--dv-activegroup-visiblepanel-tab-background-color:var(--dv-color-abyss-dark);--dv-activegroup-hiddenpanel-tab-background-color:var(--dv-color-abyss);--dv-inactivegroup-visiblepanel-tab-background-color:var(--dv-color-abyss-dark);--dv-inactivegroup-hiddenpanel-tab-background-color:var(--dv-color-abyss);--dv-tab-divider-color:var(--dv-color-abyss-lighter);--dv-activegroup-visiblepanel-tab-color:white;--dv-activegroup-hiddenpanel-tab-color:#ffffff80;--dv-inactivegroup-visiblepanel-tab-color:#ffffff80;--dv-inactivegroup-hiddenpanel-tab-color:#ffffff40;--dv-separator-border:var(--dv-color-abyss-lighter);--dv-paneview-header-border-color:var(--dv-color-abyss-lighter)}.dockview-theme-abyss .dv-drop-target-container 
.dv-drop-target-anchor.dv-drop-target-anchor-container-changed{opacity:0;transition:none}.dockview-theme-dracula{--dv-paneview-active-outline-color:#6272a4;--dv-tabs-and-actions-container-font-size:13px;--dv-tabs-and-actions-container-height:35px;--dv-drag-over-background-color:#53595d80;--dv-drag-over-border-color:transparent;--dv-tabs-container-scrollbar-color:#888;--dv-icon-hover-background-color:#5a5d5e4f;--dv-floating-box-shadow:8px 8px 8px 0px #53595d80;--dv-overlay-z-index:999;--dv-tab-font-size:inherit;--dv-border-radius:0px;--dv-tab-margin:0;--dv-sash-color:transparent;--dv-active-sash-color:transparent;--dv-active-sash-transition-duration:.1s;--dv-active-sash-transition-delay:.5s;--dv-group-view-background-color:#282a36;--dv-tabs-and-actions-container-background-color:#191a21;--dv-activegroup-visiblepanel-tab-background-color:#282a36;--dv-activegroup-hiddenpanel-tab-background-color:#21222c;--dv-inactivegroup-visiblepanel-tab-background-color:#282a36;--dv-inactivegroup-hiddenpanel-tab-background-color:#21222c;--dv-tab-divider-color:#191a21;--dv-activegroup-visiblepanel-tab-color:#f8f8f2;--dv-activegroup-hiddenpanel-tab-color:#6272a4;--dv-inactivegroup-visiblepanel-tab-color:#f8f8f280;--dv-inactivegroup-hiddenpanel-tab-color:#6272a480;--dv-separator-border:#bd93f9;--dv-paneview-header-border-color:#bd93f9}.dockview-theme-dracula .dv-drop-target-container .dv-drop-target-anchor.dv-drop-target-anchor-container-changed{opacity:0;transition:none}.dockview-theme-dracula .dv-groupview.dv-active-group>.dv-tabs-and-actions-container .dv-tabs-container>.dv-tab.dv-active-tab{position:relative}.dockview-theme-dracula .dv-groupview.dv-active-group>.dv-tabs-and-actions-container .dv-tabs-container>.dv-tab.dv-active-tab:after{content:"";z-index:999;background-color:#94527e;width:100%;height:1px;position:absolute;top:0;left:0}.dockview-theme-dracula .dv-groupview.dv-inactive-group>.dv-tabs-and-actions-container 
.dv-tabs-container>.dv-tab.dv-active-tab{position:relative}.dockview-theme-dracula .dv-groupview.dv-inactive-group>.dv-tabs-and-actions-container .dv-tabs-container>.dv-tab.dv-active-tab:after{content:"";z-index:999;background-color:#5e3d5a;width:100%;height:1px;position:absolute;bottom:0;left:0}.dockview-theme-replit{--dv-paneview-active-outline-color:dodgerblue;--dv-tabs-and-actions-container-font-size:13px;--dv-tabs-and-actions-container-height:35px;--dv-drag-over-background-color:#53595d80;--dv-drag-over-border-color:transparent;--dv-tabs-container-scrollbar-color:#888;--dv-icon-hover-background-color:#5a5d5e4f;--dv-floating-box-shadow:8px 8px 8px 0px #53595d80;--dv-overlay-z-index:999;--dv-tab-font-size:inherit;--dv-border-radius:0px;--dv-tab-margin:0;--dv-sash-color:#cfd1d3;--dv-active-sash-color:#babbbb;--dv-active-sash-transition-duration:.1s;--dv-active-sash-transition-delay:.5s;box-sizing:border-box;--dv-group-view-background-color:#ebeced;--dv-tabs-and-actions-container-background-color:#fcfcfc;--dv-activegroup-visiblepanel-tab-background-color:#f0f1f2;--dv-activegroup-hiddenpanel-tab-background-color:#fcfcfc;--dv-inactivegroup-visiblepanel-tab-background-color:#f0f1f2;--dv-inactivegroup-hiddenpanel-tab-background-color:#fcfcfc;--dv-tab-divider-color:transparent;--dv-activegroup-visiblepanel-tab-color:#333;--dv-activegroup-hiddenpanel-tab-color:#333;--dv-inactivegroup-visiblepanel-tab-color:#333;--dv-inactivegroup-hiddenpanel-tab-color:#333;--dv-separator-border:transparent;--dv-paneview-header-border-color:#333;background-color:#ebeced;padding:10px}.dockview-theme-replit .dv-drop-target-container .dv-drop-target-anchor.dv-drop-target-anchor-container-changed{opacity:0;transition:none}.dockview-theme-replit .dv-resize-container:has(>.dv-groupview){border-radius:8px}.dockview-theme-replit .dv-resize-container{border:none;border-radius:10px!important}.dockview-theme-replit .dv-groupview{border-radius:10px;overflow:hidden}.dockview-theme-replit 
.dv-groupview .dv-tabs-and-actions-container{border-bottom:1px solid #80808059}.dockview-theme-replit .dv-groupview .dv-tabs-and-actions-container .dv-tab{border-radius:8px;margin:4px}.dockview-theme-replit .dv-groupview .dv-tabs-and-actions-container .dv-tab .dv-svg{width:8px;height:8px}.dockview-theme-replit .dv-groupview .dv-tabs-and-actions-container .dv-tab:hover{background-color:#e4e5e6!important}.dockview-theme-replit .dv-groupview .dv-content-container{background-color:#fcfcfc}.dockview-theme-replit .dv-groupview.dv-active-group{border:1px solid #80808059}.dockview-theme-replit .dv-groupview.dv-inactive-group{border:1px solid #0000}.dockview-theme-replit .dv-vertical>.dv-sash-container>.dv-sash{background-color:#0000}.dockview-theme-replit .dv-vertical>.dv-sash-container>.dv-sash:not(.disabled):after{content:"";background-color:var(--dv-sash-color);border-radius:2px;width:40px;height:4px;position:absolute;top:50%;left:50%;transform:translate(-50%,-50%)}.dockview-theme-replit .dv-vertical>.dv-sash-container>.dv-sash:not(.disabled):hover,.dockview-theme-replit .dv-vertical>.dv-sash-container>.dv-sash:not(.disabled):active{background-color:#0000}.dockview-theme-replit .dv-vertical>.dv-sash-container>.dv-sash:not(.disabled):hover:after,.dockview-theme-replit .dv-vertical>.dv-sash-container>.dv-sash:not(.disabled):active:after{background-color:var(--dv-active-sash-color)}.dockview-theme-replit .dv-horizontal>.dv-sash-container>.dv-sash{background-color:#0000}.dockview-theme-replit .dv-horizontal>.dv-sash-container>.dv-sash:not(.disabled):after{content:"";background-color:var(--dv-sash-color);border-radius:2px;width:4px;height:40px;position:absolute;top:50%;left:50%;transform:translate(-50%,-50%)}.dockview-theme-replit .dv-horizontal>.dv-sash-container>.dv-sash:not(.disabled):hover,.dockview-theme-replit .dv-horizontal>.dv-sash-container>.dv-sash:not(.disabled):active{background-color:#0000}.dockview-theme-replit 
.dv-horizontal>.dv-sash-container>.dv-sash:not(.disabled):hover:after,.dockview-theme-replit .dv-horizontal>.dv-sash-container>.dv-sash:not(.disabled):active:after{background-color:var(--dv-active-sash-color)}.dockview-theme-abyss-spaced{--dv-paneview-active-outline-color:dodgerblue;--dv-tabs-and-actions-container-font-size:13px;--dv-drag-over-background-color:"";--dv-drag-over-border-color:transparent;--dv-tabs-container-scrollbar-color:#888;--dv-icon-hover-background-color:#5a5d5e4f;--dv-floating-box-shadow:8px 8px 8px 0px #00000080;--dv-overlay-z-index:999;--dv-tab-font-size:12px;--dv-tab-margin:.5rem .25rem;--dv-sash-color:transparent;--dv-active-sash-color:var(--dv-color-abyss-accent);--dv-active-sash-transition-duration:.1s;--dv-active-sash-transition-delay:.5s;--dv-tabs-and-actions-container-height:44px;--dv-border-radius:20px;box-sizing:border-box;--dv-color-abyss-dark:#0b0611;--dv-color-abyss:#16121f;--dv-color-abyss-light:#201d2b;--dv-color-abyss-lighter:#2a2837;--dv-color-abyss-accent:#5b1ecf;--dv-color-abyss-primary-text:white;--dv-color-abyss-secondary-text:#9497a9;--dv-drag-over-border:2px solid 
var(--dv-color-abyss-accent);--dv-group-view-background-color:var(--dv-color-abyss-dark);--dv-tabs-and-actions-container-background-color:var(--dv-color-abyss);--dv-activegroup-visiblepanel-tab-background-color:var(--dv-color-abyss-lighter);--dv-activegroup-hiddenpanel-tab-background-color:var(--dv-color-abyss-light);--dv-inactivegroup-visiblepanel-tab-background-color:var(--dv-color-abyss-lighter);--dv-inactivegroup-hiddenpanel-tab-background-color:var(--dv-color-abyss-light);--dv-tab-divider-color:transparent;--dv-activegroup-visiblepanel-tab-color:var(--dv-color-abyss-primary-text);--dv-activegroup-hiddenpanel-tab-color:var(--dv-color-abyss-secondary-text);--dv-inactivegroup-visiblepanel-tab-color:var(--dv-color-abyss-primary-text);--dv-inactivegroup-hiddenpanel-tab-color:var(--dv-color-abyss-secondary-text);--dv-separator-border:transparent;--dv-paneview-header-border-color:#333;background-color:var(--dv-color-abyss-dark);padding:10px}.dockview-theme-abyss-spaced .dv-resize-container:has(>.dv-groupview){border-radius:8px}.dockview-theme-abyss-spaced .dv-sash{border-radius:4px}.dockview-theme-abyss-spaced .dv-drop-target-anchor{border-radius:calc(var(--dv-border-radius)/4)}.dockview-theme-abyss-spaced .dv-drop-target-anchor.dv-drop-target-content{border-radius:var(--dv-border-radius)}.dockview-theme-abyss-spaced .dv-resize-container{border:none;border-radius:var(--dv-border-radius)!important}.dockview-theme-abyss-spaced .dv-tabs-overflow-container,.dockview-theme-abyss-spaced .dv-tabs-overflow-dropdown-default{border-radius:8px;height:unset!important}.dockview-theme-abyss-spaced .dv-tab{border-radius:8px}.dockview-theme-abyss-spaced .dv-tab .dv-svg{width:8px;height:8px}.dockview-theme-abyss-spaced .dv-groupview{border-radius:var(--dv-border-radius)}.dockview-theme-abyss-spaced .dv-groupview .dv-tabs-and-actions-container{padding:0px calc(var(--dv-border-radius)/2)}.dockview-theme-abyss-spaced .dv-groupview 
.dv-content-container{background-color:var(--dv-tabs-and-actions-container-background-color)}.dockview-theme-abyss-spaced .dv-resize-container .dv-groupview{border:2px solid var(--dv-color-abyss-dark)}.dockview-theme-light-spaced{--dv-paneview-active-outline-color:dodgerblue;--dv-tabs-and-actions-container-font-size:13px;--dv-drag-over-background-color:"";--dv-drag-over-border-color:transparent;--dv-tabs-container-scrollbar-color:#888;--dv-icon-hover-background-color:#5a5d5e4f;--dv-floating-box-shadow:8px 8px 8px 0px #0000001a;--dv-overlay-z-index:999;--dv-tab-font-size:12px;--dv-tab-margin:.5rem .25rem;--dv-sash-color:transparent;--dv-active-sash-color:#5b1ecf;--dv-active-sash-transition-duration:.1s;--dv-active-sash-transition-delay:.5s;--dv-tabs-and-actions-container-height:44px;--dv-border-radius:20px;box-sizing:border-box;--dv-drag-over-border:2px solid #5b1ecf;--dv-group-view-background-color:#f6f5f9;--dv-tabs-and-actions-container-background-color:white;--dv-activegroup-visiblepanel-tab-background-color:#ededf0;--dv-activegroup-hiddenpanel-tab-background-color:#f9f9fa;--dv-inactivegroup-visiblepanel-tab-background-color:#ededf0;--dv-inactivegroup-hiddenpanel-tab-background-color:#f9f9fa;--dv-tab-divider-color:transparent;--dv-activegroup-visiblepanel-tab-color:#686b82;--dv-activegroup-hiddenpanel-tab-color:#9497a9;--dv-inactivegroup-visiblepanel-tab-color:#686b82;--dv-inactivegroup-hiddenpanel-tab-color:#9497a9;--dv-separator-border:transparent;--dv-paneview-header-border-color:#333;--dv-scrollbar-background-color:#00000040;background-color:#f6f5f9;padding:10px}.dockview-theme-light-spaced .dv-resize-container:has(>.dv-groupview){border-radius:8px}.dockview-theme-light-spaced .dv-sash{border-radius:4px}.dockview-theme-light-spaced .dv-drop-target-anchor{border-radius:calc(var(--dv-border-radius)/4)}.dockview-theme-light-spaced .dv-drop-target-anchor.dv-drop-target-content{border-radius:var(--dv-border-radius)}.dockview-theme-light-spaced 
.dv-resize-container{border:none;border-radius:var(--dv-border-radius)!important}.dockview-theme-light-spaced .dv-tabs-overflow-container,.dockview-theme-light-spaced .dv-tabs-overflow-dropdown-default{border-radius:8px;height:unset!important}.dockview-theme-light-spaced .dv-tab{border-radius:8px}.dockview-theme-light-spaced .dv-tab .dv-svg{width:8px;height:8px}.dockview-theme-light-spaced .dv-groupview{border-radius:var(--dv-border-radius)}.dockview-theme-light-spaced .dv-groupview .dv-tabs-and-actions-container{padding:0px calc(var(--dv-border-radius)/2)}.dockview-theme-light-spaced .dv-groupview .dv-content-container{background-color:var(--dv-tabs-and-actions-container-background-color)}.dockview-theme-light-spaced .dv-resize-container .dv-groupview{border:2px solid #ffffff1a}.dv-drop-target-container{z-index:9999;pointer-events:none;--dv-transition-duration:.3s;width:100%;height:100%;position:absolute;top:0;left:0;overflow:hidden}.dv-drop-target-container .dv-drop-target-anchor{border:var(--dv-drag-over-border);background-color:var(--dv-drag-over-background-color);opacity:1;will-change:transform,opacity;backface-visibility:hidden;contain:layout paint;transition:opacity var(--dv-transition-duration)ease-in,transform var(--dv-transition-duration)ease-out;position:relative;transform:translate(0,0)}.dv-drop-target{--dv-transition-duration:70ms;position:relative}.dv-drop-target>.dv-drop-target-dropzone{z-index:1000;pointer-events:none;width:100%;height:100%;position:absolute;top:0;left:0}.dv-drop-target>.dv-drop-target-dropzone>.dv-drop-target-selection{box-sizing:border-box;border:var(--dv-drag-over-border);background-color:var(--dv-drag-over-background-color);width:100%;height:100%;transition:top var(--dv-transition-duration)ease-out,left var(--dv-transition-duration)ease-out,width var(--dv-transition-duration)ease-out,height var(--dv-transition-duration)ease-out,opacity 
var(--dv-transition-duration)ease-out;will-change:transform;pointer-events:none;position:relative}.dv-drop-target>.dv-drop-target-dropzone>.dv-drop-target-selection.dv-drop-target-top.dv-drop-target-small-vertical{border-top:1px solid var(--dv-drag-over-border-color)}.dv-drop-target>.dv-drop-target-dropzone>.dv-drop-target-selection.dv-drop-target-bottom.dv-drop-target-small-vertical{border-bottom:1px solid var(--dv-drag-over-border-color)}.dv-drop-target>.dv-drop-target-dropzone>.dv-drop-target-selection.dv-drop-target-left.dv-drop-target-small-horizontal{border-left:1px solid var(--dv-drag-over-border-color)}.dv-drop-target>.dv-drop-target-dropzone>.dv-drop-target-selection.dv-drop-target-right.dv-drop-target-small-horizontal{border-right:1px solid var(--dv-drag-over-border-color)}.dv-dockview{background-color:var(--dv-group-view-background-color);contain:layout;position:relative}.dv-dockview .dv-watermark-container{z-index:1;width:100%;height:100%;position:absolute;top:0;left:0}.dv-dockview .dv-overlay-render-container{position:relative}.dv-groupview.dv-active-group>.dv-tabs-and-actions-container .dv-tabs-container>.dv-tab.dv-active-tab{background-color:var(--dv-activegroup-visiblepanel-tab-background-color);color:var(--dv-activegroup-visiblepanel-tab-color)}.dv-groupview.dv-active-group>.dv-tabs-and-actions-container .dv-tabs-container>.dv-tab.dv-inactive-tab{background-color:var(--dv-activegroup-hiddenpanel-tab-background-color);color:var(--dv-activegroup-hiddenpanel-tab-color)}.dv-groupview.dv-inactive-group>.dv-tabs-and-actions-container .dv-tabs-container>.dv-tab.dv-active-tab{background-color:var(--dv-inactivegroup-visiblepanel-tab-background-color);color:var(--dv-inactivegroup-visiblepanel-tab-color)}.dv-groupview.dv-inactive-group>.dv-tabs-and-actions-container 
.dv-tabs-container>.dv-tab.dv-inactive-tab{background-color:var(--dv-inactivegroup-hiddenpanel-tab-background-color);color:var(--dv-inactivegroup-hiddenpanel-tab-color)}.dv-tab.dv-tab-dragging{background-color:var(--dv-activegroup-visiblepanel-tab-background-color);color:var(--dv-activegroup-visiblepanel-tab-color)}.dv-groupview{background-color:var(--dv-group-view-background-color);flex-direction:column;height:100%;display:flex;overflow:hidden}.dv-groupview:focus{outline:none}.dv-groupview>.dv-content-container{outline:none;flex-grow:1;min-height:0}.dv-root-wrapper,.dv-grid-view,.dv-branch-node{width:100%;height:100%}.dv-debug .dv-resize-container .dv-resize-handle-top{background-color:red}.dv-debug .dv-resize-container .dv-resize-handle-bottom{background-color:green}.dv-debug .dv-resize-container .dv-resize-handle-left{background-color:#ff0}.dv-debug .dv-resize-container .dv-resize-handle-right{background-color:#00f}.dv-debug .dv-resize-container .dv-resize-handle-topleft,.dv-debug .dv-resize-container .dv-resize-handle-topright,.dv-debug .dv-resize-container .dv-resize-handle-bottomleft,.dv-debug .dv-resize-container .dv-resize-handle-bottomright{background-color:#0ff}.dv-resize-container{--dv-overlay-z-index:var(--dv-overlay-z-index,999);z-index:calc(var(--dv-overlay-z-index) - 2);border:1px solid var(--dv-tab-divider-color);box-shadow:var(--dv-floating-box-shadow);will-change:transform,opacity;backface-visibility:hidden;position:absolute;transform:translate(0,0)}.dv-resize-container.dv-hidden{display:none}.dv-resize-container.dv-resize-container-dragging{opacity:.5;will-change:transform,opacity}.dv-resize-container .dv-resize-handle-top{width:calc(100% - 8px);height:4px;z-index:var(--dv-overlay-z-index);cursor:ns-resize;position:absolute;top:-2px;left:4px}.dv-resize-container .dv-resize-handle-bottom{width:calc(100% - 8px);height:4px;z-index:var(--dv-overlay-z-index);cursor:ns-resize;position:absolute;bottom:-2px;left:4px}.dv-resize-container 
.dv-resize-handle-left{width:4px;height:calc(100% - 8px);z-index:var(--dv-overlay-z-index);cursor:ew-resize;position:absolute;top:4px;left:-2px}.dv-resize-container .dv-resize-handle-right{width:4px;height:calc(100% - 8px);z-index:var(--dv-overlay-z-index);cursor:ew-resize;position:absolute;top:4px;right:-2px}.dv-resize-container .dv-resize-handle-topleft{width:4px;height:4px;z-index:var(--dv-overlay-z-index);cursor:nw-resize;position:absolute;top:-2px;left:-2px}.dv-resize-container .dv-resize-handle-topright{width:4px;height:4px;z-index:var(--dv-overlay-z-index);cursor:ne-resize;position:absolute;top:-2px;right:-2px}.dv-resize-container .dv-resize-handle-bottomleft{width:4px;height:4px;z-index:var(--dv-overlay-z-index);cursor:sw-resize;position:absolute;bottom:-2px;left:-2px}.dv-resize-container .dv-resize-handle-bottomright{width:4px;height:4px;z-index:var(--dv-overlay-z-index);cursor:se-resize;position:absolute;bottom:-2px;right:-2px}.dv-render-overlay{--dv-overlay-z-index:var(--dv-overlay-z-index,999);z-index:1;contain:layout paint;isolation:isolate;will-change:transform;backface-visibility:hidden;width:100%;height:100%;position:absolute;transform:translate(0,0)}.dv-render-overlay.dv-render-overlay-float{z-index:calc(var(--dv-overlay-z-index) - 1)}.dv-debug .dv-render-overlay{outline-offset:-1;outline:1px solid red}.dv-pane-container{width:100%;height:100%}.dv-pane-container.dv-animated .dv-view{will-change:transform;backface-visibility:hidden;transition:transform .15s ease-out;transform:translate(0,0)}.dv-pane-container .dv-view{flex-direction:column;display:flex;overflow:hidden;padding:0!important}.dv-pane-container .dv-view:not(:first-child):before{background-color:#0000!important}.dv-pane-container .dv-view:not(:first-child) .dv-pane>.dv-pane-header{border-top:1px solid var(--dv-paneview-header-border-color)}.dv-pane-container .dv-view 
.dv-default-header{background-color:var(--dv-group-view-background-color);color:var(--dv-activegroup-visiblepanel-tab-color);cursor:pointer;padding:0 8px;display:flex}.dv-pane-container .dv-view .dv-default-header .dv-pane-header-icon{justify-content:center;align-items:center;display:flex}.dv-pane-container .dv-view .dv-default-header>span{flex-grow:1;padding-left:8px}.dv-pane-container:first-of-type>.dv-pane>.dv-pane-header{border-top:none!important}.dv-pane-container .dv-pane{flex-direction:column;height:100%;display:flex;overflow:hidden}.dv-pane-container .dv-pane .dv-pane-header{box-sizing:border-box;-webkit-user-select:none;user-select:none;outline:none;position:relative}.dv-pane-container .dv-pane .dv-pane-header.dv-pane-draggable{cursor:pointer}.dv-pane-container .dv-pane .dv-pane-header:focus:before,.dv-pane-container .dv-pane .dv-pane-header:focus-within:before{z-index:5;content:"";pointer-events:none;outline-offset:-1px;outline:-1px solid;outline-color:var(--dv-paneview-active-outline-color);width:100%;height:100%;position:absolute;top:0;left:0}.dv-pane-container .dv-pane .dv-pane-body{outline:none;flex-grow:1;position:relative;overflow:hidden auto}.dv-pane-container .dv-pane .dv-pane-body:focus:before,.dv-pane-container .dv-pane .dv-pane-body:focus-within:before{z-index:5;content:"";pointer-events:none;outline-offset:-1px;outline:-1px solid;outline-color:var(--dv-paneview-active-outline-color);width:100%;height:100%;position:absolute;top:0;left:0}.dv-debug .dv-split-view-container .dv-sash-container .dv-sash.dv-enabled{background-color:#000}.dv-debug .dv-split-view-container .dv-sash-container .dv-sash.dv-disabled{background-color:orange}.dv-debug .dv-split-view-container .dv-sash-container .dv-sash.dv-maximum{background-color:green}.dv-debug .dv-split-view-container .dv-sash-container 
.dv-sash.dv-minimum{background-color:red}.dv-split-view-container{width:100%;height:100%;position:relative;overflow:hidden}.dv-split-view-container.dv-splitview-disabled>.dv-sash-container>.dv-sash{pointer-events:none}.dv-split-view-container.dv-animation .dv-view,.dv-split-view-container.dv-animation .dv-sash{will-change:transform;backface-visibility:hidden;transition:transform .15s ease-out;transform:translate(0,0)}.dv-split-view-container.dv-horizontal{height:100%}.dv-split-view-container.dv-horizontal>.dv-sash-container>.dv-sash{width:4px;height:100%}.dv-split-view-container.dv-horizontal>.dv-sash-container>.dv-sash.dv-enabled{cursor:ew-resize}.dv-split-view-container.dv-horizontal>.dv-sash-container>.dv-sash.dv-disabled{cursor:default}.dv-split-view-container.dv-horizontal>.dv-sash-container>.dv-sash.dv-maximum{cursor:w-resize}.dv-split-view-container.dv-horizontal>.dv-sash-container>.dv-sash.dv-minimum{cursor:e-resize}.dv-split-view-container.dv-horizontal>.dv-view-container>.dv-view:not(:first-child):before{width:1px;height:100%}.dv-split-view-container.dv-vertical{width:100%}.dv-split-view-container.dv-vertical>.dv-sash-container>.dv-sash{width:100%;height:4px}.dv-split-view-container.dv-vertical>.dv-sash-container>.dv-sash.dv-enabled{cursor:ns-resize}.dv-split-view-container.dv-vertical>.dv-sash-container>.dv-sash.dv-disabled{cursor:default}.dv-split-view-container.dv-vertical>.dv-sash-container>.dv-sash.dv-maximum{cursor:n-resize}.dv-split-view-container.dv-vertical>.dv-sash-container>.dv-sash.dv-minimum{cursor:s-resize}.dv-split-view-container.dv-vertical>.dv-view-container>.dv-view{width:100%}.dv-split-view-container.dv-vertical>.dv-view-container>.dv-view:not(:first-child):before{width:100%;height:1px}.dv-split-view-container .dv-sash-container{width:100%;height:100%;position:absolute}.dv-split-view-container .dv-sash-container 
.dv-sash{z-index:99;-webkit-user-select:none;user-select:none;touch-action:none;background-color:var(--dv-sash-color,transparent);outline:none;position:absolute}.dv-split-view-container .dv-sash-container .dv-sash:not(.disabled):active,.dv-split-view-container .dv-sash-container .dv-sash:not(.disabled):hover{background-color:var(--dv-active-sash-color,transparent);transition-property:background-color;transition-timing-function:ease-in-out;transition-duration:var(--dv-active-sash-transition-duration,.1s);transition-delay:var(--dv-active-sash-transition-delay,.5s)}.dv-split-view-container .dv-view-container{width:100%;height:100%;position:relative}.dv-split-view-container .dv-view-container .dv-view{box-sizing:border-box;height:100%;position:absolute;overflow:auto}.dv-split-view-container.dv-separator-border .dv-view:not(:first-child):before{content:" ";z-index:5;pointer-events:none;background-color:var(--dv-separator-border);position:absolute;top:0;left:0}.dv-dragged{transform:translate(0,0)}.dv-tab{flex-shrink:0}.dv-tab:focus-within,.dv-tab:focus{position:relative}.dv-tab:focus-within:after,.dv-tab:focus:after{content:"";pointer-events:none;outline-offset:-1px;z-index:5;width:100%;height:100%;position:absolute;top:0;left:0;outline:1px solid var(--dv-tab-divider-color)!important}.dv-tab.dv-tab-dragging .dv-default-tab-action{background-color:var(--dv-activegroup-visiblepanel-tab-color)}.dv-tab.dv-active-tab .dv-default-tab .dv-default-tab-action{visibility:visible}.dv-tab.dv-inactive-tab .dv-default-tab .dv-default-tab-action{visibility:hidden}.dv-tab.dv-inactive-tab .dv-default-tab:hover .dv-default-tab-action{visibility:visible}.dv-tab .dv-default-tab{white-space:nowrap;text-overflow:ellipsis;align-items:center;height:100%;display:flex;position:relative}.dv-tab .dv-default-tab .dv-default-tab-content{flex-grow:1;margin-right:4px}.dv-tab .dv-default-tab 
.dv-default-tab-action{box-sizing:border-box;justify-content:center;align-items:center;padding:4px;display:flex}.dv-tab .dv-default-tab .dv-default-tab-action:hover{background-color:var(--dv-icon-hover-background-color);border-radius:2px}.dv-tabs-overflow-dropdown-default{height:100%;color:var(--dv-activegroup-hiddenpanel-tab-color);margin:var(--dv-tab-margin);cursor:pointer;flex-shrink:0;align-items:center;padding:.25rem .5rem;display:flex}.dv-tabs-overflow-dropdown-default>span{padding-left:.25rem}.dv-tabs-overflow-dropdown-default>svg{transform:rotate(90deg)}.dv-tabs-container{scrollbar-width:thin;will-change:scroll-position;height:100%;display:flex;overflow:auto;transform:translate(0,0)}.dv-tabs-container.dv-horizontal .dv-tab:not(:first-child):before{content:" ";z-index:5;pointer-events:none;background-color:var(--dv-tab-divider-color);width:1px;height:100%;position:absolute;top:0;left:0}.dv-tabs-container::-webkit-scrollbar{height:3px}.dv-tabs-container::-webkit-scrollbar-track{background:0 0}.dv-tabs-container::-webkit-scrollbar-thumb{background:var(--dv-tabs-container-scrollbar-color)}.dv-scrollable>.dv-tabs-container{overflow:hidden}.dv-tab{-webkit-user-drag:element;cursor:pointer;box-sizing:border-box;font-size:var(--dv-tab-font-size);margin:var(--dv-tab-margin);outline:none;padding:.25rem .5rem;position:relative}.dv-tabs-overflow-container{height:unset;border:1px solid var(--dv-tab-divider-color);background-color:var(--dv-group-view-background-color);flex-direction:column}.dv-tabs-overflow-container .dv-tab:not(:last-child){border-bottom:1px solid var(--dv-tab-divider-color)}.dv-tabs-overflow-container .dv-active-tab{background-color:var(--dv-activegroup-visiblepanel-tab-background-color);color:var(--dv-activegroup-visiblepanel-tab-color)}.dv-tabs-overflow-container 
.dv-inactive-tab{background-color:var(--dv-activegroup-hiddenpanel-tab-background-color);color:var(--dv-activegroup-hiddenpanel-tab-color)}.dv-tabs-and-actions-container{background-color:var(--dv-tabs-and-actions-container-background-color);box-sizing:border-box;height:var(--dv-tabs-and-actions-container-height);font-size:var(--dv-tabs-and-actions-container-font-size);flex-shrink:0;display:flex}.dv-tabs-and-actions-container.dv-single-tab.dv-full-width-single-tab .dv-scrollable,.dv-tabs-and-actions-container.dv-single-tab.dv-full-width-single-tab .dv-tabs-container{flex-grow:1}.dv-tabs-and-actions-container.dv-single-tab.dv-full-width-single-tab .dv-tabs-container .dv-tab{flex-grow:1;padding:0}.dv-tabs-and-actions-container.dv-single-tab.dv-full-width-single-tab .dv-void-container{flex-grow:0}.dv-tabs-and-actions-container .dv-void-container{flex-grow:1;display:flex}.dv-tabs-and-actions-container .dv-void-container.dv-draggable{cursor:grab}.dv-tabs-and-actions-container .dv-right-actions-container{display:flex}.dv-watermark{height:100%;display:flex}
|
| 3 |
+
*,:before,:after,::backdrop{--tw-border-spacing-x:0;--tw-border-spacing-y:0;--tw-translate-x:0;--tw-translate-y:0;--tw-rotate:0;--tw-skew-x:0;--tw-skew-y:0;--tw-scale-x:1;--tw-scale-y:1;--tw-pan-x: ;--tw-pan-y: ;--tw-pinch-zoom: ;--tw-scroll-snap-strictness:proximity;--tw-gradient-from-position: ;--tw-gradient-via-position: ;--tw-gradient-to-position: ;--tw-ordinal: ;--tw-slashed-zero: ;--tw-numeric-figure: ;--tw-numeric-spacing: ;--tw-numeric-fraction: ;--tw-ring-inset: ;--tw-ring-offset-width:0px;--tw-ring-offset-color:#fff;--tw-ring-color:#3b82f680;--tw-ring-offset-shadow:0 0 #0000;--tw-ring-shadow:0 0 #0000;--tw-shadow:0 0 #0000;--tw-shadow-colored:0 0 #0000;--tw-blur: ;--tw-brightness: ;--tw-contrast: ;--tw-grayscale: ;--tw-hue-rotate: ;--tw-invert: ;--tw-saturate: ;--tw-sepia: ;--tw-drop-shadow: ;--tw-backdrop-blur: ;--tw-backdrop-brightness: ;--tw-backdrop-contrast: ;--tw-backdrop-grayscale: ;--tw-backdrop-hue-rotate: ;--tw-backdrop-invert: ;--tw-backdrop-opacity: ;--tw-backdrop-saturate: ;--tw-backdrop-sepia: ;--tw-contain-size: ;--tw-contain-layout: ;--tw-contain-paint: ;--tw-contain-style: }*,:before,:after{box-sizing:border-box;border:0 solid #e5e7eb}:before,:after{--tw-content:""}html,:host{-webkit-text-size-adjust:100%;tab-size:4;font-feature-settings:normal;font-variation-settings:normal;-webkit-tap-highlight-color:transparent;font-family:ui-sans-serif,system-ui,sans-serif,Apple Color Emoji,Segoe UI Emoji,Segoe UI Symbol,Noto Color Emoji;line-height:1.5}body{line-height:inherit;margin:0}hr{height:0;color:inherit;border-top-width:1px}abbr:where([title]){-webkit-text-decoration:underline dotted;text-decoration:underline dotted}h1,h2,h3,h4,h5,h6{font-size:inherit;font-weight:inherit}a{color:inherit;-webkit-text-decoration:inherit;text-decoration:inherit}b,strong{font-weight:bolder}code,kbd,samp,pre{font-feature-settings:normal;font-variation-settings:normal;font-family:ui-monospace,SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier 
New,monospace;font-size:1em}small{font-size:80%}sub,sup{vertical-align:baseline;font-size:75%;line-height:0;position:relative}sub{bottom:-.25em}sup{top:-.5em}table{text-indent:0;border-color:inherit;border-collapse:collapse}button,input,optgroup,select,textarea{font-feature-settings:inherit;font-variation-settings:inherit;font-family:inherit;font-size:100%;font-weight:inherit;line-height:inherit;letter-spacing:inherit;color:inherit;margin:0;padding:0}button,select{text-transform:none}button,input:where([type=button]),input:where([type=reset]),input:where([type=submit]){-webkit-appearance:button;background-color:#0000;background-image:none}:-moz-focusring{outline:auto}:-moz-ui-invalid{box-shadow:none}progress{vertical-align:baseline}::-webkit-inner-spin-button{height:auto}::-webkit-outer-spin-button{height:auto}[type=search]{-webkit-appearance:textfield;outline-offset:-2px}::-webkit-search-decoration{-webkit-appearance:none}::-webkit-file-upload-button{-webkit-appearance:button;font:inherit}summary{display:list-item}blockquote,dl,dd,h1,h2,h3,h4,h5,h6,hr,figure,p,pre{margin:0}fieldset{margin:0;padding:0}legend{padding:0}ol,ul,menu{margin:0;padding:0;list-style:none}dialog{padding:0}textarea{resize:vertical}input::-moz-placeholder{opacity:1;color:#9ca3af}textarea::-moz-placeholder{opacity:1;color:#9ca3af}input::placeholder,textarea::placeholder{opacity:1;color:#9ca3af}button,[role=button]{cursor:pointer}:disabled{cursor:default}img,svg,video,canvas,audio,iframe,embed,object{vertical-align:middle;display:block}img,video{max-width:100%;height:auto}[hidden]:where(:not([hidden=until-found])){display:none}html,body{height:100%;margin:0;padding:0;overflow:hidden}:root{--background:215 28% 7%;--foreground:213 27% 92%;--card:215 21% 11%;--card-foreground:213 27% 92%;--popover:215 21% 11%;--popover-foreground:213 27% 92%;--primary:212 100% 67%;--primary-foreground:215 28% 7%;--secondary:215 14% 17%;--secondary-foreground:213 27% 92%;--muted:215 14% 17%;--muted-foreground:213 
12% 58%;--accent:215 14% 17%;--accent-foreground:213 27% 92%;--destructive:0 62% 50%;--destructive-foreground:0 0% 98%;--border:215 14% 22%;--input:215 14% 22%;--ring:212 100% 67%;--radius:.375rem;--surface:215 21% 11%;--surface-light:215 14% 17%;--surface-elevated:215 14% 22%;--border-subtle:215 14% 17%;--text:213 27% 92%;--text-muted:213 12% 58%;--text-subtle:215 10% 46%;--accent-cyan:176 60% 53%;--accent-orange:27 86% 59%}*{border-color:hsl(var(--border))}body{background-color:hsl(var(--background));color:hsl(var(--foreground));letter-spacing:-.15px;font-feature-settings:"liga" 1,"calt" 1;-webkit-font-smoothing:antialiased;-moz-osx-font-smoothing:grayscale;font-size:12px;font-weight:500;line-height:16px}.\!container{width:100%!important}.container{width:100%}@media (min-width:640px){.\!container{max-width:640px!important}.container{max-width:640px}}@media (min-width:768px){.\!container{max-width:768px!important}.container{max-width:768px}}@media (min-width:1024px){.\!container{max-width:1024px!important}.container{max-width:1024px}}@media (min-width:1280px){.\!container{max-width:1280px!important}.container{max-width:1280px}}@media 
(min-width:1536px){.\!container{max-width:1536px!important}.container{max-width:1536px}}.sr-only{clip:rect(0,0,0,0);white-space:nowrap;border-width:0;width:1px;height:1px;margin:-1px;padding:0;position:absolute;overflow:hidden}.pointer-events-none{pointer-events:none}.visible{visibility:visible}.static{position:static}.fixed{position:fixed}.absolute{position:absolute}.relative{position:relative}.inset-0{inset:0}.bottom-0\.5{bottom:.125rem}.left-0\.5{left:.125rem}.left-1\.5{left:.375rem}.left-\[50\%\]{left:50%}.right-0\.5{right:.125rem}.right-3{right:.75rem}.right-4{right:1rem}.top-0\.5{top:.125rem}.top-3{top:.75rem}.top-4{top:1rem}.top-\[50\%\]{top:50%}.z-10{z-index:10}.z-50{z-index:50}.-mx-1{margin-left:-.25rem;margin-right:-.25rem}.mx-1{margin-left:.25rem;margin-right:.25rem}.mx-auto{margin-left:auto;margin-right:auto}.my-0\.5{margin-top:.125rem;margin-bottom:.125rem}.mb-2{margin-bottom:.5rem}.mb-4{margin-bottom:1rem}.ml-0\.5{margin-left:.125rem}.ml-1{margin-left:.25rem}.ml-2{margin-left:.5rem}.ml-auto{margin-left:auto}.mr-2{margin-right:.5rem}.mt-4{margin-top:1rem}.inline-block{display:inline-block}.inline{display:inline}.flex{display:flex}.inline-flex{display:inline-flex}.grid{display:grid}.hidden{display:none}.h-10{height:2.5rem}.h-12{height:3rem}.h-16{height:4rem}.h-2{height:.5rem}.h-2\.5{height:.625rem}.h-3{height:.75rem}.h-3\.5{height:.875rem}.h-4{height:1rem}.h-5{height:1.25rem}.h-6{height:1.5rem}.h-7{height:1.75rem}.h-8{height:2rem}.h-9{height:2.25rem}.h-\[10px\]{height:10px}.h-\[1px\]{height:1px}.h-full{height:100%}.h-px{height:1px}.h-screen{height:100vh}.max-h-\[300px\]{max-height:300px}.max-h-\[var\(--radix-dropdown-menu-content-available-height\)\]{max-height:var(--radix-dropdown-menu-content-available-height)}.min-h-0{min-height:0}.min-h-\[24px\]{min-height:24px}.min-h-\[28px\]{min-height:28px}.w-12{width:3rem}.w-16{width:4rem}.w-2{width:.5rem}.w-2\.5{width:.625rem}.w-3{width:.75rem}.w-3\.5{width:.875rem}.w-4{width:1rem}.w-48{width:12rem}.w-5{width:1.
25rem}.w-6{width:1.5rem}.w-72{width:18rem}.w-8{width:2rem}.w-9{width:2.25rem}.w-\[10px\]{width:10px}.w-\[1px\]{width:1px}.w-\[280px\]{width:280px}.w-\[600px\]{width:600px}.w-full{width:100%}.w-px{width:1px}.min-w-0{min-width:0}.min-w-10{min-width:2.5rem}.min-w-8{min-width:2rem}.min-w-9{min-width:2.25rem}.min-w-\[10rem\]{min-width:10rem}.min-w-\[220px\]{min-width:220px}.min-w-\[240px\]{min-width:240px}.max-w-\[220px\]{max-width:220px}.max-w-\[260px\]{max-width:260px}.max-w-\[340px\]{max-width:340px}.max-w-\[60vw\]{max-width:60vw}.max-w-full{max-width:100%}.max-w-lg{max-width:32rem}.max-w-md{max-width:28rem}.flex-1{flex:1}.flex-shrink-0,.shrink-0{flex-shrink:0}.origin-\[--radix-dropdown-menu-content-transform-origin\]{transform-origin:var(--radix-dropdown-menu-content-transform-origin)}.origin-\[--radix-popover-content-transform-origin\]{transform-origin:var(--radix-popover-content-transform-origin)}.origin-\[--radix-tooltip-content-transform-origin\]{transform-origin:var(--radix-tooltip-content-transform-origin)}.translate-x-\[-50\%\]{--tw-translate-x:-50%;transform:translate(var(--tw-translate-x),var(--tw-translate-y))rotate(var(--tw-rotate))skewX(var(--tw-skew-x))skewY(var(--tw-skew-y))scaleX(var(--tw-scale-x))scaleY(var(--tw-scale-y))}.translate-y-\[-50\%\]{--tw-translate-y:-50%;transform:translate(var(--tw-translate-x),var(--tw-translate-y))rotate(var(--tw-rotate))skewX(var(--tw-skew-x))skewY(var(--tw-skew-y))scaleX(var(--tw-scale-x))scaleY(var(--tw-scale-y))}.transform{transform:translate(var(--tw-translate-x),var(--tw-translate-y))rotate(var(--tw-rotate))skewX(var(--tw-skew-x))skewY(var(--tw-skew-y))scaleX(var(--tw-scale-x))scaleY(var(--tw-scale-y))}@keyframes spin{to{transform:rotate(360deg)}}.animate-spin{animation:1s linear infinite 
spin}.cursor-default{cursor:default}.cursor-pointer{cursor:pointer}.touch-none{touch-action:none}.select-none{-webkit-user-select:none;user-select:none}.resize{resize:both}.flex-col{flex-direction:column}.flex-col-reverse{flex-direction:column-reverse}.items-center{align-items:center}.justify-start{justify-content:flex-start}.justify-center{justify-content:center}.justify-between{justify-content:space-between}.gap-0\.5{gap:.125rem}.gap-1{gap:.25rem}.gap-1\.5{gap:.375rem}.gap-2{gap:.5rem}.gap-4{gap:1rem}.space-y-1\.5>:not([hidden])~:not([hidden]){--tw-space-y-reverse:0;margin-top:calc(.375rem*calc(1 - var(--tw-space-y-reverse)));margin-bottom:calc(.375rem*var(--tw-space-y-reverse))}.space-y-px>:not([hidden])~:not([hidden]){--tw-space-y-reverse:0;margin-top:calc(1px*calc(1 - var(--tw-space-y-reverse)));margin-bottom:calc(1px*var(--tw-space-y-reverse))}.overflow-auto{overflow:auto}.overflow-hidden{overflow:hidden}.overflow-x-auto{overflow-x:auto}.overflow-y-auto{overflow-y:auto}.overflow-x-hidden{overflow-x:hidden}.truncate{text-overflow:ellipsis;white-space:nowrap;overflow:hidden}.whitespace-nowrap{white-space:nowrap}.rounded{border-radius:.25rem}.rounded-\[3px\]{border-radius:3px}.rounded-\[inherit\]{border-radius:inherit}.rounded-full{border-radius:9999px}.rounded-lg{border-radius:var(--radius)}.rounded-md{border-radius:calc(var(--radius) - 2px)}.rounded-sm{border-radius:calc(var(--radius) - 
4px)}.border{border-width:1px}.border-2{border-width:2px}.border-b{border-bottom-width:1px}.border-l{border-left-width:1px}.border-t{border-top-width:1px}.border-border{border-color:hsl(var(--border))}.border-border\/50{border-color:hsl(var(--border)/.5)}.border-input{border-color:hsl(var(--input))}.border-primary{border-color:hsl(var(--primary))}.border-l-transparent{border-left-color:#0000}.border-t-transparent{border-top-color:#0000}.bg-background{background-color:hsl(var(--background))}.bg-black\/80{background-color:#000c}.bg-border{background-color:hsl(var(--border))}.bg-border\/80{background-color:hsl(var(--border)/.8)}.bg-card{background-color:hsl(var(--card))}.bg-card\/80{background-color:hsl(var(--card)/.8)}.bg-card\/85{background-color:hsl(var(--card)/.85)}.bg-destructive{background-color:hsl(var(--destructive))}.bg-muted{background-color:hsl(var(--muted))}.bg-muted-foreground\/25{background-color:hsl(var(--muted-foreground)/.25)}.bg-muted\/40{background-color:hsl(var(--muted)/.4)}.bg-muted\/50{background-color:hsl(var(--muted)/.5)}.bg-muted\/60{background-color:hsl(var(--muted)/.6)}.bg-popover{background-color:hsl(var(--popover))}.bg-primary{background-color:hsl(var(--primary))}.bg-secondary{background-color:hsl(var(--secondary))}.bg-secondary\/20{background-color:hsl(var(--secondary)/.2)}.bg-transparent{background-color:#0000}.fill-current{fill:currentColor}.object-cover{-o-object-fit:cover;object-fit:cover}.p-0{padding:0}.p-1{padding:.25rem}.p-1\.5{padding:.375rem}.p-4{padding:1rem}.p-6{padding:1.5rem}.p-\[2px\]{padding:2px}.px-1{padding-left:.25rem;padding-right:.25rem}.px-1\.5{padding-left:.375rem;padding-right:.375rem}.px-2{padding-left:.5rem;padding-right:.5rem}.px-2\.5{padding-left:.625rem;padding-right:.625rem}.px-3{padding-left:.75rem;padding-right:.75rem}.px-4{padding-left:1rem;padding-right:1rem}.px-8{padding-left:2rem;padding-right:2rem}.py-0{padding-top:0;padding-bottom:0}.py-0\.5{padding-top:.125rem;padding-bottom:.125rem}.py-1{padding-top:.
25rem;padding-bottom:.25rem}.py-1\.5{padding-top:.375rem;padding-bottom:.375rem}.py-2{padding-top:.5rem;padding-bottom:.5rem}.py-3{padding-top:.75rem;padding-bottom:.75rem}.py-4{padding-top:1rem;padding-bottom:1rem}.py-6{padding-top:1.5rem;padding-bottom:1.5rem}.pl-6{padding-left:1.5rem}.pl-8{padding-left:2rem}.pr-1\.5{padding-right:.375rem}.pt-0{padding-top:0}.text-left{text-align:left}.text-center{text-align:center}.font-mono{font-family:ui-monospace,SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,monospace}.text-\[10px\]{font-size:10px}.text-\[11px\]{font-size:11px}.text-\[12px\]{font-size:12px}.text-lg{font-size:1.125rem;line-height:1.75rem}.text-sm{font-size:.875rem;line-height:1.25rem}.text-xs{font-size:.75rem;line-height:1rem}.font-medium{font-weight:500}.font-normal{font-weight:400}.font-semibold{font-weight:600}.uppercase{text-transform:uppercase}.tabular-nums{--tw-numeric-spacing:tabular-nums;font-variant-numeric:var(--tw-ordinal)var(--tw-slashed-zero)var(--tw-numeric-figure)var(--tw-numeric-spacing)var(--tw-numeric-fraction)}.leading-4{line-height:1rem}.leading-\[16px\]{line-height:16px}.leading-none{line-height:1}.leading-tight{line-height:1.25}.tracking-\[-0\.15px\]{letter-spacing:-.15px}.tracking-\[0\.06em\]{letter-spacing:.06em}.tracking-tight{letter-spacing:-.025em}.tracking-widest{letter-spacing:.1em}.text-destructive{color:hsl(var(--destructive))}.text-destructive-foreground{color:hsl(var(--destructive-foreground))}.text-foreground{color:hsl(var(--foreground))}.text-foreground\/70{color:hsl(var(--foreground)/.7)}.text-muted-foreground{color:hsl(var(--muted-foreground))}.text-muted-foreground\/20{color:hsl(var(--muted-foreground)/.2)}.text-muted-foreground\/50{color:hsl(var(--muted-foreground)/.5)}.text-muted-foreground\/60{color:hsl(var(--muted-foreground)/.6)}.text-muted-foreground\/70{color:hsl(var(--muted-foreground)/.7)}.text-popover-foreground{color:hsl(var(--popover-foreground))}.text-primary{color:hsl(var(--primary))}.text-p
rimary-foreground{color:hsl(var(--primary-foreground))}.text-secondary-foreground{color:hsl(var(--secondary-foreground))}.text-white{--tw-text-opacity:1;color:rgb(255 255 255/var(--tw-text-opacity,1))}.underline-offset-4{text-underline-offset:4px}.antialiased{-webkit-font-smoothing:antialiased;-moz-osx-font-smoothing:grayscale}.opacity-40{opacity:.4}.opacity-50{opacity:.5}.opacity-70{opacity:.7}.shadow{--tw-shadow:0 1px 3px 0 #0000001a,0 1px 2px -1px #0000001a;--tw-shadow-colored:0 1px 3px 0 var(--tw-shadow-color),0 1px 2px -1px var(--tw-shadow-color);box-shadow:var(--tw-ring-offset-shadow,0 0 #0000),var(--tw-ring-shadow,0 0 #0000),var(--tw-shadow)}.shadow-lg{--tw-shadow:0 10px 15px -3px #0000001a,0 4px 6px -4px #0000001a;--tw-shadow-colored:0 10px 15px -3px var(--tw-shadow-color),0 4px 6px -4px var(--tw-shadow-color);box-shadow:var(--tw-ring-offset-shadow,0 0 #0000),var(--tw-ring-shadow,0 0 #0000),var(--tw-shadow)}.shadow-md{--tw-shadow:0 4px 6px -1px #0000001a,0 2px 4px -2px #0000001a;--tw-shadow-colored:0 4px 6px -1px var(--tw-shadow-color),0 2px 4px -2px var(--tw-shadow-color);box-shadow:var(--tw-ring-offset-shadow,0 0 #0000),var(--tw-ring-shadow,0 0 #0000),var(--tw-shadow)}.shadow-sm{--tw-shadow:0 1px 2px 0 #0000000d;--tw-shadow-colored:0 1px 2px 0 var(--tw-shadow-color);box-shadow:var(--tw-ring-offset-shadow,0 0 #0000),var(--tw-ring-shadow,0 0 #0000),var(--tw-shadow)}.outline-none{outline-offset:2px;outline:2px solid #0000}.outline{outline-style:solid}.ring-2{--tw-ring-offset-shadow:var(--tw-ring-inset)0 0 0 var(--tw-ring-offset-width)var(--tw-ring-offset-color);--tw-ring-shadow:var(--tw-ring-inset)0 0 0 calc(2px + var(--tw-ring-offset-width))var(--tw-ring-color);box-shadow:var(--tw-ring-offset-shadow),var(--tw-ring-shadow),var(--tw-shadow,0 0 
#0000)}.ring-inset{--tw-ring-inset:inset}.ring-primary{--tw-ring-color:hsl(var(--primary))}.ring-primary\/50{--tw-ring-color:hsl(var(--primary)/.5)}.ring-offset-background{--tw-ring-offset-color:hsl(var(--background))}.filter{filter:var(--tw-blur)var(--tw-brightness)var(--tw-contrast)var(--tw-grayscale)var(--tw-hue-rotate)var(--tw-invert)var(--tw-saturate)var(--tw-sepia)var(--tw-drop-shadow)}.transition-colors{transition-property:color,background-color,border-color,text-decoration-color,fill,stroke;transition-duration:.15s;transition-timing-function:cubic-bezier(.4,0,.2,1)}.transition-opacity{transition-property:opacity;transition-duration:.15s;transition-timing-function:cubic-bezier(.4,0,.2,1)}.transition-shadow{transition-property:box-shadow;transition-duration:.15s;transition-timing-function:cubic-bezier(.4,0,.2,1)}.duration-150{transition-duration:.15s}.duration-200{transition-duration:.2s}.ease-out{transition-timing-function:cubic-bezier(0,0,.2,1)}@keyframes enter{0%{opacity:var(--tw-enter-opacity,1);transform:translate3d(var(--tw-enter-translate-x,0),var(--tw-enter-translate-y,0),0)scale3d(var(--tw-enter-scale,1),var(--tw-enter-scale,1),var(--tw-enter-scale,1))rotate(var(--tw-enter-rotate,0))}}@keyframes exit{to{opacity:var(--tw-exit-opacity,1);transform:translate3d(var(--tw-exit-translate-x,0),var(--tw-exit-translate-y,0),0)scale3d(var(--tw-exit-scale,1),var(--tw-exit-scale,1),var(--tw-exit-scale,1))rotate(var(--tw-exit-rotate,0))}}.animate-in{--tw-enter-opacity:initial;--tw-enter-scale:initial;--tw-enter-rotate:initial;--tw-enter-translate-x:initial;--tw-enter-translate-y:initial;animation-name:enter;animation-duration:.15s}.fade-in-0{--tw-enter-opacity:0}.zoom-in-95{--tw-enter-scale:.95}.duration-150{animation-duration:.15s}.duration-200{animation-duration:.2s}.ease-out{animation-timing-function:cubic-bezier(0,0,.2,1)}.running{animation-play-state:running}::-webkit-scrollbar{width:10px;height:10px}::-webkit-scrollbar-track{background:0 
0}::-webkit-scrollbar-thumb{background:hsl(var(--muted-foreground)/.2);background-clip:padding-box;border:2px solid #0000;border-radius:3px}::-webkit-scrollbar-thumb:hover{background:hsl(var(--muted-foreground)/.35);background-clip:padding-box;border:2px solid #0000}*{scrollbar-width:thin;scrollbar-color:hsl(var(--muted-foreground)/.25)transparent}.hide-scrollbar::-webkit-scrollbar{display:none}.hide-scrollbar{-ms-overflow-style:none;scrollbar-width:none}.panel-scroll{scrollbar-gutter:stable}.hyperview-dockview{--dv-background-color:hsl(var(--background));--dv-group-view-background-color:hsl(var(--card));--dv-tabs-and-actions-container-background-color:hsl(var(--secondary));--dv-activegroup-visiblepanel-tab-background-color:hsl(var(--card));--dv-activegroup-hiddenpanel-tab-background-color:hsl(var(--secondary));--dv-inactivegroup-visiblepanel-tab-background-color:hsl(var(--secondary));--dv-inactivegroup-hiddenpanel-tab-background-color:hsl(var(--secondary));--dv-activegroup-visiblepanel-tab-color:hsl(var(--foreground));--dv-activegroup-hiddenpanel-tab-color:hsl(var(--muted-foreground));--dv-inactivegroup-visiblepanel-tab-color:hsl(var(--muted-foreground));--dv-inactivegroup-hiddenpanel-tab-color:hsl(var(--muted-foreground));--dv-tabs-and-actions-container-font-size:12px;--dv-tabs-and-actions-container-height:24px;--dv-tab-font-size:12px;--dv-tabs-container-scrollbar-color:hsl(var(--muted-foreground)/.25);--dv-scrollbar-background-color:hsl(var(--muted-foreground)/.25);--dv-tab-divider-color:hsl(var(--border));--dv-separator-border:transparent;--dv-paneview-header-border-color:hsl(var(--border));--dv-sash-color:hsl(var(--border));--dv-icon-hover-background-color:hsl(var(--accent));--dv-active-sash-color:hsl(var(--primary));--dv-drag-over-background-color:hsl(var(--primary)/.15);--dv-drag-over-border-color:hsl(var(--primary)/.6);--dv-tab-margin:0}.hyperview-dockview .dv-groupview{border:none}.hyperview-dockview .dv-tabs-and-actions-container{border-bottom:1px solid 
hsl(var(--border))}.hyperview-dockview .dv-tab{align-items:center;height:100%;padding:0 8px;line-height:16px;display:flex}.hyperview-dockview .dv-tab .dv-react-part{align-items:center;min-width:0;height:100%;display:flex}.hyperview-dockview .dv-scrollable .dv-scrollbar-horizontal{border-radius:3px;height:6px}.hyperview-dockview .dv-tabs-container{scrollbar-width:none}.hyperview-dockview .dv-tabs-container::-webkit-scrollbar{height:0}.hyperview-dockview .dv-scrollable .dv-scrollbar-horizontal{display:none}.hyperview-dockview .dv-split-view-container>.dv-sash-container>.dv-sash{background-color:#0000!important}.hyperview-dockview .dv-split-view-container.dv-horizontal>.dv-sash-container>.dv-sash:after{content:"";background-color:hsl(var(--border));pointer-events:none;width:1px;position:absolute;top:0;bottom:0;left:50%;transform:translate(-50%)}.hyperview-dockview .dv-split-view-container.dv-horizontal>.dv-sash-container>.dv-sash.dv-enabled:hover:after{background-color:hsl(var(--primary))}.hyperview-dockview .dv-split-view-container.dv-vertical>.dv-sash-container>.dv-sash:after{content:"";background-color:hsl(var(--border));pointer-events:none;height:1px;position:absolute;top:50%;left:0;right:0;transform:translateY(-50%)}.hyperview-dockview 
.dv-split-view-container.dv-vertical>.dv-sash-container>.dv-sash.dv-enabled:hover:after{background-color:hsl(var(--primary))}.placeholder\:text-muted-foreground::placeholder{color:hsl(var(--muted-foreground))}.placeholder\:text-muted-foreground\/50::placeholder{color:hsl(var(--muted-foreground)/.5)}.hover\:bg-accent:hover{background-color:hsl(var(--accent))}.hover\:bg-destructive\/90:hover{background-color:hsl(var(--destructive)/.9)}.hover\:bg-muted:hover{background-color:hsl(var(--muted))}.hover\:bg-muted-foreground\/40:hover{background-color:hsl(var(--muted-foreground)/.4)}.hover\:bg-muted\/40:hover{background-color:hsl(var(--muted)/.4)}.hover\:bg-muted\/50:hover{background-color:hsl(var(--muted)/.5)}.hover\:bg-muted\/60:hover{background-color:hsl(var(--muted)/.6)}.hover\:bg-primary\/90:hover{background-color:hsl(var(--primary)/.9)}.hover\:bg-secondary\/80:hover{background-color:hsl(var(--secondary)/.8)}.hover\:text-accent-foreground:hover{color:hsl(var(--accent-foreground))}.hover\:text-foreground:hover{color:hsl(var(--foreground))}.hover\:text-muted-foreground:hover{color:hsl(var(--muted-foreground))}.hover\:underline:hover{text-decoration-line:underline}.hover\:opacity-100:hover{opacity:1}.focus\:border-ring:focus{border-color:hsl(var(--ring))}.focus\:bg-accent\/70:focus{background-color:hsl(var(--accent)/.7)}.focus\:text-accent-foreground:focus{color:hsl(var(--accent-foreground))}.focus\:outline-none:focus{outline-offset:2px;outline:2px solid #0000}.focus\:ring-1:focus{--tw-ring-offset-shadow:var(--tw-ring-inset)0 0 0 var(--tw-ring-offset-width)var(--tw-ring-offset-color);--tw-ring-shadow:var(--tw-ring-inset)0 0 0 calc(1px + var(--tw-ring-offset-width))var(--tw-ring-color);box-shadow:var(--tw-ring-offset-shadow),var(--tw-ring-shadow),var(--tw-shadow,0 0 #0000)}.focus\:ring-2:focus{--tw-ring-offset-shadow:var(--tw-ring-inset)0 0 0 var(--tw-ring-offset-width)var(--tw-ring-offset-color);--tw-ring-shadow:var(--tw-ring-inset)0 0 0 calc(2px + 
var(--tw-ring-offset-width))var(--tw-ring-color);box-shadow:var(--tw-ring-offset-shadow),var(--tw-ring-shadow),var(--tw-shadow,0 0 #0000)}.focus\:ring-ring:focus{--tw-ring-color:hsl(var(--ring))}.focus\:ring-offset-2:focus{--tw-ring-offset-width:2px}.focus-visible\:outline-none:focus-visible{outline-offset:2px;outline:2px solid #0000}.focus-visible\:ring-1:focus-visible{--tw-ring-offset-shadow:var(--tw-ring-inset)0 0 0 var(--tw-ring-offset-width)var(--tw-ring-offset-color);--tw-ring-shadow:var(--tw-ring-inset)0 0 0 calc(1px + var(--tw-ring-offset-width))var(--tw-ring-color);box-shadow:var(--tw-ring-offset-shadow),var(--tw-ring-shadow),var(--tw-shadow,0 0 #0000)}.focus-visible\:ring-ring:focus-visible{--tw-ring-color:hsl(var(--ring))}.disabled\:pointer-events-none:disabled{pointer-events:none}.disabled\:cursor-not-allowed:disabled{cursor:not-allowed}.disabled\:opacity-50:disabled{opacity:.5}.data-\[disabled\=true\]\:pointer-events-none[data-disabled=true],.data-\[disabled\]\:pointer-events-none[data-disabled]{pointer-events:none}.data-\[state\=on\]\:border-primary[data-state=on]{border-color:hsl(var(--primary))}.data-\[selected\=true\]\:bg-accent[data-selected=true]{background-color:hsl(var(--accent))}.data-\[state\=on\]\:bg-primary[data-state=on]{background-color:hsl(var(--primary))}.data-\[state\=open\]\:bg-accent[data-state=open]{background-color:hsl(var(--accent))}.data-\[state\=open\]\:bg-accent\/70[data-state=open]{background-color:hsl(var(--accent)/.7)}.data-\[selected\=true\]\:text-accent-foreground[data-selected=true]{color:hsl(var(--accent-foreground))}.data-\[state\=on\]\:text-primary-foreground[data-state=on]{color:hsl(var(--primary-foreground))}.data-\[state\=open\]\:text-accent-foreground[data-state=open]{color:hsl(var(--accent-foreground))}.data-\[state\=open\]\:text-muted-foreground[data-state=open]{color:hsl(var(--muted-foreground))}.data-\[disabled\=true\]\:opacity-50[data-disabled=true],.data-\[disabled\]\:opacity-50[data-disabled]{opacity:.5}.data
-\[state\=open\]\:animate-in[data-state=open]{--tw-enter-opacity:initial;--tw-enter-scale:initial;--tw-enter-rotate:initial;--tw-enter-translate-x:initial;--tw-enter-translate-y:initial;animation-name:enter;animation-duration:.15s}.data-\[state\=closed\]\:animate-out[data-state=closed]{--tw-exit-opacity:initial;--tw-exit-scale:initial;--tw-exit-rotate:initial;--tw-exit-translate-x:initial;--tw-exit-translate-y:initial;animation-name:exit;animation-duration:.15s}.data-\[state\=closed\]\:fade-out-0[data-state=closed]{--tw-exit-opacity:0}.data-\[state\=open\]\:fade-in-0[data-state=open]{--tw-enter-opacity:0}.data-\[state\=closed\]\:zoom-out-95[data-state=closed]{--tw-exit-scale:.95}.data-\[state\=open\]\:zoom-in-95[data-state=open]{--tw-enter-scale:.95}.data-\[side\=bottom\]\:slide-in-from-top-2[data-side=bottom]{--tw-enter-translate-y:-.5rem}.data-\[side\=left\]\:slide-in-from-right-2[data-side=left]{--tw-enter-translate-x:.5rem}.data-\[side\=right\]\:slide-in-from-left-2[data-side=right]{--tw-enter-translate-x:-.5rem}.data-\[side\=top\]\:slide-in-from-bottom-2[data-side=top]{--tw-enter-translate-y:.5rem}.data-\[state\=closed\]\:slide-out-to-left-1\/2[data-state=closed]{--tw-exit-translate-x:-50%}.data-\[state\=closed\]\:slide-out-to-top-\[48\%\][data-state=closed]{--tw-exit-translate-y:-48%}.data-\[state\=open\]\:slide-in-from-left-1\/2[data-state=open]{--tw-enter-translate-x:-50%}.data-\[state\=open\]\:slide-in-from-top-\[48\%\][data-state=open]{--tw-enter-translate-y:-48%}@media (min-width:640px){.sm\:flex-row{flex-direction:row}.sm\:justify-end{justify-content:flex-end}.sm\:space-x-2>:not([hidden])~:not([hidden]){--tw-space-x-reverse:0;margin-right:calc(.5rem*var(--tw-space-x-reverse));margin-left:calc(.5rem*calc(1 - var(--tw-space-x-reverse)))}.sm\:rounded-lg{border-radius:var(--radius)}.sm\:text-left{text-align:left}}.\[\&\>svg\]\:size-3\.5>svg{width:.875rem;height:.875rem}.\[\&\>svg\]\:shrink-0>svg{flex-shrink:0}.\[\&_\[cmdk-group-heading\]\]\:px-2 
[cmdk-group-heading]{padding-left:.5rem;padding-right:.5rem}.\[\&_\[cmdk-group-heading\]\]\:py-1\.5 [cmdk-group-heading]{padding-top:.375rem;padding-bottom:.375rem}.\[\&_\[cmdk-group-heading\]\]\:text-xs [cmdk-group-heading]{font-size:.75rem;line-height:1rem}.\[\&_\[cmdk-group-heading\]\]\:font-medium [cmdk-group-heading]{font-weight:500}.\[\&_\[cmdk-group-heading\]\]\:text-muted-foreground [cmdk-group-heading]{color:hsl(var(--muted-foreground))}.\[\&_\[cmdk-group\]\:not\(\[hidden\]\)_\~\[cmdk-group\]\]\:pt-0 [cmdk-group]:not([hidden])~[cmdk-group]{padding-top:0}.\[\&_\[cmdk-group\]\]\:px-2 [cmdk-group]{padding-left:.5rem;padding-right:.5rem}.\[\&_\[cmdk-input-wrapper\]_svg\]\:h-5 [cmdk-input-wrapper] svg{height:1.25rem}.\[\&_\[cmdk-input-wrapper\]_svg\]\:w-5 [cmdk-input-wrapper] svg{width:1.25rem}.\[\&_\[cmdk-input\]\]\:h-12 [cmdk-input]{height:3rem}.\[\&_\[cmdk-item\]\]\:px-2 [cmdk-item]{padding-left:.5rem;padding-right:.5rem}.\[\&_\[cmdk-item\]\]\:py-3 [cmdk-item]{padding-top:.75rem;padding-bottom:.75rem}.\[\&_\[cmdk-item\]_svg\]\:h-5 [cmdk-item] svg{height:1.25rem}.\[\&_\[cmdk-item\]_svg\]\:w-5 [cmdk-item] svg{width:1.25rem}.\[\&_svg\]\:pointer-events-none svg{pointer-events:none}.\[\&_svg\]\:size-3\.5 svg{width:.875rem;height:.875rem}.\[\&_svg\]\:size-4 svg{width:1rem;height:1rem}.\[\&_svg\]\:shrink-0 svg{flex-shrink:0}
|
vendor/hyperview/server/static/_next/static/chunks/567993cf36cd4ab1.js
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
vendor/hyperview/server/static/_next/static/chunks/86c1fc4cf542f408.js
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
(globalThis.TURBOPACK||(globalThis.TURBOPACK=[])).push(["object"==typeof document?document.currentScript:void 0,75115,(e,t,r)=>{"use strict";Object.defineProperty(r,"__esModule",{value:!0}),Object.defineProperty(r,"default",{enumerable:!0,get:function(){return s}});let n=e.r(48277),o=e.r(86846),i={fontFamily:'system-ui,"Segoe UI",Roboto,Helvetica,Arial,sans-serif,"Apple Color Emoji","Segoe UI Emoji"',height:"100vh",textAlign:"center",display:"flex",flexDirection:"column",alignItems:"center",justifyContent:"center"},u={fontSize:"14px",fontWeight:400,lineHeight:"28px",margin:"0 8px"},s=function({error:e}){let t=e?.digest;return(0,n.jsxs)("html",{id:"__next_error__",children:[(0,n.jsx)("head",{}),(0,n.jsxs)("body",{children:[(0,n.jsx)(o.HandleISRError,{error:e}),(0,n.jsx)("div",{style:i,children:(0,n.jsxs)("div",{children:[(0,n.jsxs)("h2",{style:u,children:["Application error: a ",t?"server":"client","-side exception has occurred while loading ",window.location.hostname," (see the"," ",t?"server logs":"browser console"," for more information)."]}),t?(0,n.jsx)("p",{style:u,children:`Digest: ${t}`}):null]})})]})]})};("function"==typeof r.default||"object"==typeof r.default&&null!==r.default)&&void 0===r.default.__esModule&&(Object.defineProperty(r.default,"__esModule",{value:!0}),Object.assign(r.default,r),t.exports=r.default)},90175,(e,t,r)=>{var n={229:function(e){var t,r,n,o=e.exports={};function i(){throw Error("setTimeout has not been defined")}function u(){throw Error("clearTimeout has not been defined")}try{t="function"==typeof setTimeout?setTimeout:i}catch(e){t=i}try{r="function"==typeof clearTimeout?clearTimeout:u}catch(e){r=u}function s(e){if(t===setTimeout)return setTimeout(e,0);if((t===i||!t)&&setTimeout)return t=setTimeout,setTimeout(e,0);try{return t(e,0)}catch(r){try{return t.call(null,e,0)}catch(r){return t.call(this,e,0)}}}var c=[],a=!1,l=-1;function f(){a&&n&&(a=!1,n.length?c=n.concat(c):l=-1,c.length&&p())}function p(){if(!a){var e=s(f);a=!0;for(var 
t=c.length;t;){for(n=c,c=[];++l<t;)n&&n[l].run();l=-1,t=c.length}n=null,a=!1,function(e){if(r===clearTimeout)return clearTimeout(e);if((r===u||!r)&&clearTimeout)return r=clearTimeout,clearTimeout(e);try{r(e)}catch(t){try{return r.call(null,e)}catch(t){return r.call(this,e)}}}(e)}}function d(e,t){this.fun=e,this.array=t}function y(){}o.nextTick=function(e){var t=Array(arguments.length-1);if(arguments.length>1)for(var r=1;r<arguments.length;r++)t[r-1]=arguments[r];c.push(new d(e,t)),1!==c.length||a||s(p)},d.prototype.run=function(){this.fun.apply(null,this.array)},o.title="browser",o.browser=!0,o.env={},o.argv=[],o.version="",o.versions={},o.on=y,o.addListener=y,o.once=y,o.off=y,o.removeListener=y,o.removeAllListeners=y,o.emit=y,o.prependListener=y,o.prependOnceListener=y,o.listeners=function(e){return[]},o.binding=function(e){throw Error("process.binding is not supported")},o.cwd=function(){return"/"},o.chdir=function(e){throw Error("process.chdir is not supported")},o.umask=function(){return 0}}},o={};function i(e){var t=o[e];if(void 0!==t)return t.exports;var r=o[e]={exports:{}},u=!0;try{n[e](r,r.exports,i),u=!1}finally{u&&delete o[e]}return r.exports}i.ab="/ROOT/frontend/node_modules/next/dist/compiled/process/",t.exports=i(229)},39057,(e,t,r)=>{"use strict";var n,o;t.exports=(null==(n=e.g.process)?void 0:n.env)&&"object"==typeof(null==(o=e.g.process)?void 0:o.env)?e.g.process:e.r(90175)},14871,(e,t,r)=>{"use strict";var n=Symbol.for("react.transitional.element");function o(e,t,r){var o=null;if(void 0!==r&&(o=""+r),void 0!==t.key&&(o=""+t.key),"key"in t)for(var i in r={},t)"key"!==i&&(r[i]=t[i]);else r=t;return{$$typeof:n,type:e,key:o,ref:void 0!==(t=r.ref)?t:null,props:r}}r.Fragment=Symbol.for("react.fragment"),r.jsx=o,r.jsxs=o},48277,(e,t,r)=>{"use strict";t.exports=e.r(14871)},24171,(e,t,r)=>{"use strict";Object.defineProperty(r,"__esModule",{value:!0});var n={bindSnapshot:function(){return a},createAsyncLocalStorage:function(){return 
c},createSnapshot:function(){return l}};for(var o in n)Object.defineProperty(r,o,{enumerable:!0,get:n[o]});let i=Object.defineProperty(Error("Invariant: AsyncLocalStorage accessed in runtime where it is not available"),"__NEXT_ERROR_CODE",{value:"E504",enumerable:!1,configurable:!0});class u{disable(){throw i}getStore(){}run(){throw i}exit(){throw i}enterWith(){throw i}static bind(e){return e}}let s="u">typeof globalThis&&globalThis.AsyncLocalStorage;function c(){return s?new s:new u}function a(e){return s?s.bind(e):u.bind(e)}function l(){return s?s.snapshot():function(e,...t){return e(...t)}}},98616,(e,t,r)=>{"use strict";Object.defineProperty(r,"__esModule",{value:!0}),Object.defineProperty(r,"workAsyncStorageInstance",{enumerable:!0,get:function(){return n}});let n=(0,e.r(24171).createAsyncLocalStorage)()},26628,(e,t,r)=>{"use strict";Object.defineProperty(r,"__esModule",{value:!0}),Object.defineProperty(r,"workAsyncStorage",{enumerable:!0,get:function(){return n.workAsyncStorageInstance}});let n=e.r(98616)},86846,(e,t,r)=>{"use strict";Object.defineProperty(r,"__esModule",{value:!0}),Object.defineProperty(r,"HandleISRError",{enumerable:!0,get:function(){return o}});let n="u"<typeof window?e.r(26628).workAsyncStorage:void 0;function o({error:e}){if(n){let t=n.getStore();if(t?.isStaticGeneration)throw e&&console.error(e),e}return null}("function"==typeof r.default||"object"==typeof r.default&&null!==r.default)&&void 0===r.default.__esModule&&(Object.defineProperty(r.default,"__esModule",{value:!0}),Object.assign(r.default,r),t.exports=r.default)},42124,(e,t,r)=>{"use strict";var 
n=e.i(39057),o=Symbol.for("react.transitional.element"),i=Symbol.for("react.portal"),u=Symbol.for("react.fragment"),s=Symbol.for("react.strict_mode"),c=Symbol.for("react.profiler"),a=Symbol.for("react.consumer"),l=Symbol.for("react.context"),f=Symbol.for("react.forward_ref"),p=Symbol.for("react.suspense"),d=Symbol.for("react.memo"),y=Symbol.for("react.lazy"),h=Symbol.for("react.activity"),g=Symbol.for("react.view_transition"),v=Symbol.iterator,_={isMounted:function(){return!1},enqueueForceUpdate:function(){},enqueueReplaceState:function(){},enqueueSetState:function(){}},m=Object.assign,b={};function S(e,t,r){this.props=e,this.context=t,this.refs=b,this.updater=r||_}function O(){}function E(e,t,r){this.props=e,this.context=t,this.refs=b,this.updater=r||_}S.prototype.isReactComponent={},S.prototype.setState=function(e,t){if("object"!=typeof e&&"function"!=typeof e&&null!=e)throw Error("takes an object of state variables to update or a function which returns an object of state variables.");this.updater.enqueueSetState(this,e,t,"setState")},S.prototype.forceUpdate=function(e){this.updater.enqueueForceUpdate(this,e,"forceUpdate")},O.prototype=S.prototype;var T=E.prototype=new O;T.constructor=E,m(T,S.prototype),T.isPureReactComponent=!0;var w=Array.isArray;function j(){}var R={H:null,A:null,T:null,S:null},x=Object.prototype.hasOwnProperty;function A(e,t,r){var n=r.ref;return{$$typeof:o,type:e,key:t,ref:void 0!==n?n:null,props:r}}function P(e){return"object"==typeof e&&null!==e&&e.$$typeof===o}var C=/\/+/g;function k(e,t){var r,n;return"object"==typeof e&&null!==e&&null!=e.key?(r=""+e.key,n={"=":"=0",":":"=2"},"$"+r.replace(/[=:]/g,function(e){return n[e]})):t.toString(36)}function H(e,t,r){if(null==e)return e;var n=[],u=0;return!function e(t,r,n,u,s){var c,a,l,f=typeof t;("undefined"===f||"boolean"===f)&&(t=null);var p=!1;if(null===t)p=!0;else switch(f){case"bigint":case"string":case"number":p=!0;break;case"object":switch(t.$$typeof){case o:case i:p=!0;break;case 
y:return e((p=t._init)(t._payload),r,n,u,s)}}if(p)return s=s(t),p=""===u?"."+k(t,0):u,w(s)?(n="",null!=p&&(n=p.replace(C,"$&/")+"/"),e(s,r,n,"",function(e){return e})):null!=s&&(P(s)&&(c=s,a=n+(null==s.key||t&&t.key===s.key?"":(""+s.key).replace(C,"$&/")+"/")+p,s=A(c.type,a,c.props)),r.push(s)),1;p=0;var d=""===u?".":u+":";if(w(t))for(var h=0;h<t.length;h++)f=d+k(u=t[h],h),p+=e(u,r,n,f,s);else if("function"==typeof(h=null===(l=t)||"object"!=typeof l?null:"function"==typeof(l=v&&l[v]||l["@@iterator"])?l:null))for(t=h.call(t),h=0;!(u=t.next()).done;)f=d+k(u=u.value,h++),p+=e(u,r,n,f,s);else if("object"===f){if("function"==typeof t.then)return e(function(e){switch(e.status){case"fulfilled":return e.value;case"rejected":throw e.reason;default:switch("string"==typeof e.status?e.then(j,j):(e.status="pending",e.then(function(t){"pending"===e.status&&(e.status="fulfilled",e.value=t)},function(t){"pending"===e.status&&(e.status="rejected",e.reason=t)})),e.status){case"fulfilled":return e.value;case"rejected":throw e.reason}}throw e}(t),r,n,u,s);throw Error("Objects are not valid as a React child (found: "+("[object Object]"===(r=String(t))?"object with keys {"+Object.keys(t).join(", ")+"}":r)+"). 
If you meant to render a collection of children, use an array instead.")}return p}(e,n,"","",function(e){return t.call(r,e,u++)}),n}function L(e){if(-1===e._status){var t=e._result;(t=t()).then(function(t){(0===e._status||-1===e._status)&&(e._status=1,e._result=t)},function(t){(0===e._status||-1===e._status)&&(e._status=2,e._result=t)}),-1===e._status&&(e._status=0,e._result=t)}if(1===e._status)return e._result.default;throw e._result}var I="function"==typeof reportError?reportError:function(e){if("object"==typeof window&&"function"==typeof window.ErrorEvent){var t=new window.ErrorEvent("error",{bubbles:!0,cancelable:!0,message:"object"==typeof e&&null!==e&&"string"==typeof e.message?String(e.message):String(e),error:e});if(!window.dispatchEvent(t))return}else if("object"==typeof n.default&&"function"==typeof n.default.emit)return void n.default.emit("uncaughtException",e);console.error(e)};function M(e){var t=R.T,r={};r.types=null!==t?t.types:null,R.T=r;try{var n=e(),o=R.S;null!==o&&o(r,n),"object"==typeof n&&null!==n&&"function"==typeof n.then&&n.then(j,I)}catch(e){I(e)}finally{null!==t&&null!==r.types&&(t.types=r.types),R.T=t}}function N(e){var t=R.T;if(null!==t){var r=t.types;null===r?t.types=[e]:-1===r.indexOf(e)&&r.push(e)}else M(N.bind(null,e))}r.Activity=h,r.Children={map:H,forEach:function(e,t,r){H(e,function(){t.apply(this,arguments)},r)},count:function(e){var t=0;return H(e,function(){t++}),t},toArray:function(e){return H(e,function(e){return e})||[]},only:function(e){if(!P(e))throw Error("React.Children.only expected to receive a single React element child.");return e}},r.Component=S,r.Fragment=u,r.Profiler=c,r.PureComponent=E,r.StrictMode=s,r.Suspense=p,r.ViewTransition=g,r.__CLIENT_INTERNALS_DO_NOT_USE_OR_WARN_USERS_THEY_CANNOT_UPGRADE=R,r.__COMPILER_RUNTIME={__proto__:null,c:function(e){return R.H.useMemoCache(e)}},r.addTransitionType=N,r.cache=function(e){return function(){return e.apply(null,arguments)}},r.cacheSignal=function(){return 
null},r.cloneElement=function(e,t,r){if(null==e)throw Error("The argument must be a React element, but you passed "+e+".");var n=m({},e.props),o=e.key;if(null!=t)for(i in void 0!==t.key&&(o=""+t.key),t)x.call(t,i)&&"key"!==i&&"__self"!==i&&"__source"!==i&&("ref"!==i||void 0!==t.ref)&&(n[i]=t[i]);var i=arguments.length-2;if(1===i)n.children=r;else if(1<i){for(var u=Array(i),s=0;s<i;s++)u[s]=arguments[s+2];n.children=u}return A(e.type,o,n)},r.createContext=function(e){return(e={$$typeof:l,_currentValue:e,_currentValue2:e,_threadCount:0,Provider:null,Consumer:null}).Provider=e,e.Consumer={$$typeof:a,_context:e},e},r.createElement=function(e,t,r){var n,o={},i=null;if(null!=t)for(n in void 0!==t.key&&(i=""+t.key),t)x.call(t,n)&&"key"!==n&&"__self"!==n&&"__source"!==n&&(o[n]=t[n]);var u=arguments.length-2;if(1===u)o.children=r;else if(1<u){for(var s=Array(u),c=0;c<u;c++)s[c]=arguments[c+2];o.children=s}if(e&&e.defaultProps)for(n in u=e.defaultProps)void 0===o[n]&&(o[n]=u[n]);return A(e,i,o)},r.createRef=function(){return{current:null}},r.forwardRef=function(e){return{$$typeof:f,render:e}},r.isValidElement=P,r.lazy=function(e){return{$$typeof:y,_payload:{_status:-1,_result:e},_init:L}},r.memo=function(e,t){return{$$typeof:d,type:e,compare:void 0===t?null:t}},r.startTransition=M,r.unstable_useCacheRefresh=function(){return R.H.useCacheRefresh()},r.use=function(e){return R.H.use(e)},r.useActionState=function(e,t,r){return R.H.useActionState(e,t,r)},r.useCallback=function(e,t){return R.H.useCallback(e,t)},r.useContext=function(e){return R.H.useContext(e)},r.useDebugValue=function(){},r.useDeferredValue=function(e,t){return R.H.useDeferredValue(e,t)},r.useEffect=function(e,t){return R.H.useEffect(e,t)},r.useEffectEvent=function(e){return R.H.useEffectEvent(e)},r.useId=function(){return R.H.useId()},r.useImperativeHandle=function(e,t,r){return R.H.useImperativeHandle(e,t,r)},r.useInsertionEffect=function(e,t){return 
R.H.useInsertionEffect(e,t)},r.useLayoutEffect=function(e,t){return R.H.useLayoutEffect(e,t)},r.useMemo=function(e,t){return R.H.useMemo(e,t)},r.useOptimistic=function(e,t){return R.H.useOptimistic(e,t)},r.useReducer=function(e,t,r){return R.H.useReducer(e,t,r)},r.useRef=function(e){return R.H.useRef(e)},r.useState=function(e){return R.H.useState(e)},r.useSyncExternalStore=function(e,t,r){return R.H.useSyncExternalStore(e,t,r)},r.useTransition=function(){return R.H.useTransition()},r.version="19.3.0-canary-f93b9fd4-20251217"},30668,(e,t,r)=>{"use strict";t.exports=e.r(42124)},15501,(e,t,r)=>{"use strict";var n=e.r(30668);function o(e){var t="https://react.dev/errors/"+e;if(1<arguments.length){t+="?args[]="+encodeURIComponent(arguments[1]);for(var r=2;r<arguments.length;r++)t+="&args[]="+encodeURIComponent(arguments[r])}return"Minified React error #"+e+"; visit "+t+" for the full message or use the non-minified dev environment for full errors and additional helpful warnings."}function i(){}var u={d:{f:i,r:function(){throw Error(o(522))},D:i,C:i,L:i,m:i,X:i,S:i,M:i},p:0,findDOMNode:null},s=Symbol.for("react.portal"),c=Symbol.for("react.optimistic_key"),a=n.__CLIENT_INTERNALS_DO_NOT_USE_OR_WARN_USERS_THEY_CANNOT_UPGRADE;function l(e,t){return"font"===e?"":"string"==typeof t?"use-credentials"===t?t:"":void 0}r.__DOM_INTERNALS_DO_NOT_USE_OR_WARN_USERS_THEY_CANNOT_UPGRADE=u,r.createPortal=function(e,t){var r=2<arguments.length&&void 0!==arguments[2]?arguments[2]:null;if(!t||1!==t.nodeType&&9!==t.nodeType&&11!==t.nodeType)throw Error(o(299));return function(e,t,r){var n=3<arguments.length&&void 0!==arguments[3]?arguments[3]:null;return{$$typeof:s,key:null==n?null:n===c?c:""+n,children:e,containerInfo:t,implementation:r}}(e,t,null,r)},r.flushSync=function(e){var t=a.T,r=u.p;try{if(a.T=null,u.p=2,e)return e()}finally{a.T=t,u.p=r,u.d.f()}},r.preconnect=function(e,t){"string"==typeof e&&(t=t?"string"==typeof(t=t.crossOrigin)?"use-credentials"===t?t:"":void 
0:null,u.d.C(e,t))},r.prefetchDNS=function(e){"string"==typeof e&&u.d.D(e)},r.preinit=function(e,t){if("string"==typeof e&&t&&"string"==typeof t.as){var r=t.as,n=l(r,t.crossOrigin),o="string"==typeof t.integrity?t.integrity:void 0,i="string"==typeof t.fetchPriority?t.fetchPriority:void 0;"style"===r?u.d.S(e,"string"==typeof t.precedence?t.precedence:void 0,{crossOrigin:n,integrity:o,fetchPriority:i}):"script"===r&&u.d.X(e,{crossOrigin:n,integrity:o,fetchPriority:i,nonce:"string"==typeof t.nonce?t.nonce:void 0})}},r.preinitModule=function(e,t){if("string"==typeof e)if("object"==typeof t&&null!==t){if(null==t.as||"script"===t.as){var r=l(t.as,t.crossOrigin);u.d.M(e,{crossOrigin:r,integrity:"string"==typeof t.integrity?t.integrity:void 0,nonce:"string"==typeof t.nonce?t.nonce:void 0})}}else null==t&&u.d.M(e)},r.preload=function(e,t){if("string"==typeof e&&"object"==typeof t&&null!==t&&"string"==typeof t.as){var r=t.as,n=l(r,t.crossOrigin);u.d.L(e,r,{crossOrigin:n,integrity:"string"==typeof t.integrity?t.integrity:void 0,nonce:"string"==typeof t.nonce?t.nonce:void 0,type:"string"==typeof t.type?t.type:void 0,fetchPriority:"string"==typeof t.fetchPriority?t.fetchPriority:void 0,referrerPolicy:"string"==typeof t.referrerPolicy?t.referrerPolicy:void 0,imageSrcSet:"string"==typeof t.imageSrcSet?t.imageSrcSet:void 0,imageSizes:"string"==typeof t.imageSizes?t.imageSizes:void 0,media:"string"==typeof t.media?t.media:void 0})}},r.preloadModule=function(e,t){if("string"==typeof e)if(t){var r=l(t.as,t.crossOrigin);u.d.m(e,{as:"string"==typeof t.as&&"script"!==t.as?t.as:void 0,crossOrigin:r,integrity:"string"==typeof t.integrity?t.integrity:void 0})}else u.d.m(e)},r.requestFormReset=function(e){u.d.r(e)},r.unstable_batchedUpdates=function(e,t){return e(t)},r.useFormState=function(e,t,r){return a.H.useFormState(e,t,r)},r.useFormStatus=function(){return a.H.useHostTransitionStatus()},r.version="19.3.0-canary-f93b9fd4-20251217"},69941,(e,t,r)=>{"use strict";!function 
e(){if("u">typeof __REACT_DEVTOOLS_GLOBAL_HOOK__&&"function"==typeof __REACT_DEVTOOLS_GLOBAL_HOOK__.checkDCE)try{__REACT_DEVTOOLS_GLOBAL_HOOK__.checkDCE(e)}catch(e){console.error(e)}}(),t.exports=e.r(15501)}]);
|
vendor/hyperview/server/static/_next/static/chunks/8d5a2ef3447cb3ee.js
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|