File size: 1,455 Bytes
63089c1
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
from __future__ import annotations

from dataclasses import asdict, dataclass
import json
from pathlib import Path
from typing import Any, Dict


@dataclass
class ProtoMorphConfig:
    """Configuration for the custom ProtoMorph head.

    The frozen DINOv3 backbone is loaded separately from Hugging Face. The
    safetensors checkpoint stores only the trainable experimental head, which is
    what you will train/tune for your dataset.
    """

    # Backbone / task settings
    dino_model_name: str = "facebook/dinov3-vits16-pretrain-lvd1689m"
    num_classes: int = 10
    embed_dim: int = 384  # presumably the backbone token width (ViT-S/16 is 384-d) — confirm against dino_model_name
    patch_size: int = 16

    # ProtoMorph blocks
    proto_count: int = 64
    memory_tokens: int = 16
    rbf_count: int = 128
    num_heads: int = 8
    dropout: float = 0.0

    # Hard-case gate thresholds
    hard_pmax_threshold: float = 0.65
    hard_margin_threshold: float = 0.15
    hard_entropy_threshold: float = 1.35

    # Inference / performance knobs
    image_size: int = 512
    use_bf16_autocast: bool = True
    normalize_patch_tokens: bool = True

    @classmethod
    def from_json(cls, path: str | Path) -> "ProtoMorphConfig":
        """Load a config from a JSON file written by :meth:`to_json`.

        Raises ``json.JSONDecodeError`` on malformed JSON and ``TypeError``
        if the file contains keys that are not dataclass fields.
        """
        # Explicit UTF-8: the platform default encoding (e.g. cp1252 on
        # Windows) would corrupt any non-ASCII config value on read.
        data = json.loads(Path(path).read_text(encoding="utf-8"))
        return cls(**data)

    def to_dict(self) -> Dict[str, Any]:
        """Return the config as a plain dict (JSON-serializable)."""
        return asdict(self)

    def to_json(self, path: str | Path) -> None:
        """Write the config as pretty-printed JSON, creating parent dirs.

        The file is UTF-8 and ends with a trailing newline.
        """
        p = Path(path)
        p.parent.mkdir(parents=True, exist_ok=True)
        # ensure_ascii=False keeps non-ASCII values human-readable; safe
        # because the file is written (and read back) as UTF-8 explicitly.
        payload = json.dumps(self.to_dict(), indent=2, ensure_ascii=False)
        p.write_text(payload + "\n", encoding="utf-8")