| from __future__ import annotations |
|
|
| from dataclasses import asdict, dataclass |
| import json |
| from pathlib import Path |
| from typing import Any, Dict |
|
|
|
|
@dataclass
class ProtoMorphConfig:
    """Configuration for the custom ProtoMorph head.

    The frozen DINOv3 backbone is loaded separately from Hugging Face. The
    safetensors checkpoint stores only the trainable experimental head, which is
    what you will train/tune for your dataset.
    """

    # --- Backbone / task geometry ---
    # Hugging Face model id of the frozen DINOv3 backbone.
    dino_model_name: str = "facebook/dinov3-vits16-pretrain-lvd1689m"
    # Number of output classes for the head.
    num_classes: int = 10
    # Token embedding width (384 matches the ViT-S/16 backbone above).
    embed_dim: int = 384
    # ViT patch size in pixels.
    patch_size: int = 16

    # --- Head capacity ---
    # Number of learnable prototypes in the head.
    proto_count: int = 64
    # Number of learnable memory tokens.
    memory_tokens: int = 16
    # Number of RBF units.
    rbf_count: int = 128
    # Attention heads inside the head module.
    num_heads: int = 8
    # Dropout probability (0.0 disables dropout).
    dropout: float = 0.0

    # --- Hard-example gating thresholds ---
    # NOTE(review): exact gating semantics live in the head implementation,
    # not visible here — these are the raw threshold values it consumes.
    hard_pmax_threshold: float = 0.65
    hard_margin_threshold: float = 0.15
    hard_entropy_threshold: float = 1.35

    # --- Input / runtime options ---
    # Square input resolution in pixels.
    image_size: int = 512
    # Run the head under bf16 autocast when enabled.
    use_bf16_autocast: bool = True
    # L2-normalize backbone patch tokens before the head.
    normalize_patch_tokens: bool = True

    @classmethod
    def from_json(cls, path: str | Path) -> "ProtoMorphConfig":
        """Load a config from a JSON file written by :meth:`to_json`.

        Raises ``TypeError`` if the file contains keys that are not fields of
        this dataclass, and ``json.JSONDecodeError`` on malformed JSON.
        """
        # Read explicitly as UTF-8: the platform default encoding (e.g.
        # cp1252 on Windows) would mis-decode non-ASCII config values.
        data = json.loads(Path(path).read_text(encoding="utf-8"))
        return cls(**data)

    def to_dict(self) -> Dict[str, Any]:
        """Return the config as a plain ``dict`` (JSON-serializable)."""
        return asdict(self)

    def to_json(self, path: str | Path) -> None:
        """Write the config as pretty-printed JSON, creating parent dirs.

        The file ends with a trailing newline and is written as UTF-8.
        """
        p = Path(path)
        p.parent.mkdir(parents=True, exist_ok=True)
        # ensure_ascii=False keeps non-ASCII values human-readable in the
        # file; pair it with an explicit UTF-8 encoding on write.
        p.write_text(
            json.dumps(self.to_dict(), indent=2, ensure_ascii=False) + "\n",
            encoding="utf-8",
        )
|
|