# Uploaded by Makatia via huggingface_hub (commit 0bbc981, verified)
---
# Problem A: Few-Shot Defect Classification - Configuration
# Intel contest: 8 defect classes, grayscale images up to ~7000x5600
# Uses ONLY official challenge data from challenge/dataset/Dataset/Data/
# Dataset: defect1(253), defect2(178), defect3(9), defect4(14),
# defect5(411), defect8(803), defect9(319), defect10(674), good(7135)
# Contest: classify into 8 DEFECT classes
#
# CRITICAL: defect3~defect9 (0.963 cosine sim) and defect4~defect8 (0.889)
# are nearly identical without training on them. ALL 8 classes must be in
# training so the backbone learns to separate these similar pairs.
data:
  # Root of the official challenge dataset (relative to the run directory)
  root: "../challenge/dataset/Dataset/Data/"
  img_size: 518  # DINOv2 native input: 518 = 37 * 14 -> 37x37 grid of 14-px patches
  defect_only: false
  # ALL 8 defect classes + good (class 0) in training
  train_classes: [0, 1, 2, 3, 4, 5, 8, 9, 10]
  test_classes: [3, 4]  # Monitor the hardest (rarest) classes during validation
  all_classes: [0, 1, 2, 3, 4, 5, 8, 9, 10]
model:
  backbone: "dinov2"
  backbone_size: "large"  # DINOv2 ViT-L/14 (1024-dim features, ~304M params)
  freeze_backbone: true
  unfreeze_last_n: 6  # Fine-tune last 6 transformer blocks + norm
  grad_checkpointing: true  # Trade recompute for memory on large inputs
  proj_hidden: 768  # Hidden width of the projection head
  proj_dim: 512  # Final embedding dimension used for prototypes
training:
  n_way: 9  # ALL 9 classes per episode (8 defect + good)
  k_shot: 5  # Higher shot count for better prototypes
  n_query: 10  # More queries = stronger gradient signal
  # Sampler uses replacement for rare classes (defect3=9, defect4=14)
  n_episodes_train: 500  # Fewer but harder 9-way episodes
  n_episodes_val: 100
  epochs: 100
  lr: 3.0e-4  # Learning rate for the new (projection) layers
  lr_backbone: 5.0e-6  # Much lower LR for the partially unfrozen backbone
  warmup_epochs: 5
  weight_decay: 1.0e-4
  use_amp: true  # Mixed-precision training
  gradient_clip: 1.0
  label_smoothing: 0.1  # Prevent overconfidence on easy classes
  patience: 20  # Early-stopping patience, in epochs
evaluation:
  n_seeds: 5  # Repeat evaluation under 5 random seeds
  max_examples: 50
  kshot_values: [1, 3, 5, 10, 20]  # Shot budgets to sweep at eval time
  target_accuracy: 0.85
output:
  checkpoint_dir: "checkpoints/"
  results_dir: "outputs/"

# Global RNG seed for reproducibility (applies to the whole run, not just output)
seed: 42