# phanerozoic's picture
# Complete circuit repo: model.py, config.json, CofiberDecomposition.v, README
# 4d7f570 verified
"""
Cofiber detection threshold circuit — standalone inference.
Loads model.safetensors and runs multi-scale object detection on a
feature tensor from any frozen vision backbone.
Usage:
import torch
from model import CofiberDetector
detector = CofiberDetector.from_safetensors("model.safetensors")
features = torch.randn(768, 40, 40) # from frozen ViT at stride 16
detections = detector.detect(features, score_thresh=0.3)
"""
import json
from pathlib import Path
import numpy as np
import torch
from safetensors.torch import load_file
def heaviside(x):
    """Unit step: 1.0 where x >= 0, else 0.0, as a float32 tensor."""
    return x.ge(0).to(torch.float32)
class CofiberDetector:
    """Depth-3 threshold circuit for multi-scale object detection.

    Layer 0: Average pool 2x (fixed weights, zero parameters)
    Layer 1: Cofiber = x - upsample(pool(x)) (fixed weights, zero parameters)
    Layer 2: Classify = H(w . cofib + b) (INT8 learned weights)
    """

    def __init__(self, prototypes, biases, scale_factor, n_scales=3):
        """Build a detector from dequantized weights.

        Args:
            prototypes: (num_classes, feat_dim) array of class weight vectors
            biases: (num_classes,) array of thresholds
            scale_factor: INT8 quantization scale, kept for introspection
            n_scales: number of pyramid levels (the last level skips pooling)
        """
        self.prototypes = prototypes
        self.biases = biases
        self.scale_factor = scale_factor
        self.n_scales = n_scales
        self.num_classes = prototypes.shape[0]
        self.feat_dim = prototypes.shape[1]

    @classmethod
    def from_safetensors(cls, path):
        """Load weights from a safetensors checkpoint.

        Expects keys "classify.weight", "classify.bias" and a scalar
        "classify.scale_factor".
        """
        tensors = load_file(str(path))
        prototypes = tensors["classify.weight"]
        biases = tensors["classify.bias"]
        scale_factor = tensors["classify.scale_factor"].item()
        # Dequantize from INT8 representation.
        # NOTE(review): this divides by scale_factor; the usual torch
        # convention is real = q * scale. Confirm against the exporter
        # that produced the checkpoint.
        prototypes = prototypes / scale_factor
        biases = biases / scale_factor
        return cls(prototypes.numpy(), biases.numpy(), scale_factor)

    def _pool(self, x):
        """Layer 0: Average pool 2x. Fixed weights {0.25, 0.25, 0.25, 0.25}.

        An odd trailing row/column is cropped so the four strided views
        align; behavior is unchanged for even inputs such as 40x40.
        """
        h2 = (x.shape[1] // 2) * 2
        w2 = (x.shape[2] // 2) * 2
        x = x[:, :h2, :w2]
        return (x[:, 0::2, 0::2] + x[:, 0::2, 1::2] +
                x[:, 1::2, 0::2] + x[:, 1::2, 1::2]) / 4

    def _cofiber(self, x, pooled):
        """Layer 1: Subtract. cofib = x - upsample(pool(x)). Fixed weights {1, -1}.
        Uses nearest-neighbor upsample for exact integer arithmetic."""
        h, w = x.shape[1], x.shape[2]
        # Nearest-neighbor 2x upsample, cropped back to x's spatial size.
        upsampled = np.repeat(np.repeat(pooled, 2, axis=1), 2, axis=2)[:, :h, :w]
        return x - upsampled

    def _classify(self, features, stride):
        """Layer 2: H(w . features + b). One threshold gate per (location, class).

        Returns one detection dict per firing gate (logit >= 0); score
        filtering against the caller's threshold happens in detect().
        """
        C, h, w = features.shape
        flat = features.reshape(C, -1).T  # (H*W, C)
        logits = flat @ self.prototypes.T + self.biases  # (H*W, num_classes)
        detections = []
        # Heaviside: a gate fires iff its logit >= 0. np.argwhere yields
        # (loc, class) pairs in the same row-major order as the original
        # Python double loop, without iterating every non-firing gate.
        for loc, cls_idx in np.argwhere(logits >= 0):
            loc = int(loc)
            yi, xi = loc // w, loc % w
            # Box: centered on patch location, size proportional to stride.
            cx = (xi + 0.5) * stride
            cy = (yi + 0.5) * stride
            half = stride * 2
            detections.append({
                "box": [cx - half, cy - half, cx + half, cy + half],
                "score": float(logits[loc, cls_idx]),
                "label": int(cls_idx),
                "scale": stride,
            })
        return detections

    def detect(self, features, score_thresh=0.3):
        """Run the full 3-layer circuit.

        Args:
            features: (C, H, W) numpy array or torch tensor from a frozen
                backbone at stride 16
            score_thresh: minimum logit value to report a detection
        Returns:
            list of {"box": [x1,y1,x2,y2], "score": float, "label": int, "scale": int}
        """
        if isinstance(features, torch.Tensor):
            # detach/cpu first: plain .numpy() raises on CUDA tensors and
            # on tensors that require grad.
            features = features.detach().cpu().numpy()
        all_dets = []
        f = features
        # One stride per pyramid level: 16, 32, 64, ...
        strides = [16 * (2 ** i) for i in range(self.n_scales)]
        for scale_idx in range(self.n_scales):
            stride = strides[scale_idx]
            if scale_idx < self.n_scales - 1:
                # Intermediate levels classify the high-frequency residual
                # (cofiber) and pass the pooled map down the pyramid.
                pooled = self._pool(f)
                cofib = self._cofiber(f, pooled)
                dets = self._classify(cofib, stride)
                f = pooled
            else:
                # Coarsest level classifies the remaining low-pass features.
                dets = self._classify(f, stride)
            all_dets.extend([d for d in dets if d["score"] >= score_thresh])
        return all_dets

    @property
    def param_count(self):
        """Number of learned scalar parameters (weights + biases)."""
        return self.prototypes.size + self.biases.size

    @property
    def gate_count(self):
        """Total threshold gates across all scales.

        NOTE(review): assumes a 40x40 stride-16 feature map (matches the
        module docstring example); detect() itself accepts any H, W.
        """
        total = 0
        h, w = 40, 40
        for s in range(self.n_scales):
            pool_gates = (h // 2) * (w // 2) * self.feat_dim
            subtract_gates = h * w * self.feat_dim
            classify_gates = h * w * self.num_classes
            total += pool_gates + subtract_gates + classify_gates
            h, w = h // 2, w // 2
        return total
if __name__ == "__main__":
    # Smoke test: load the checkpoint sitting next to this file and run
    # the detector on deterministic random features.
    path = Path(__file__).parent / "model.safetensors"
    if not path.exists():
        print(f"model.safetensors not found at {path}")
        # raise SystemExit rather than calling exit(): the exit() builtin
        # is injected by the site module for interactive use and is not
        # guaranteed to exist in all environments.
        raise SystemExit(1)
    detector = CofiberDetector.from_safetensors(path)
    print(f"Loaded cofiber detector: {detector.param_count:,} params, {detector.gate_count:,} gates")
    # Random features shaped like a ViT stride-16 map (seeded for repeatability).
    np.random.seed(42)
    features = np.random.randn(768, 40, 40).astype(np.float32)
    dets = detector.detect(features, score_thresh=0.0)
    print(f"Detections on random input (thresh=0.0): {len(dets)}")
    dets_thresh = detector.detect(features, score_thresh=0.3)
    print(f"Detections on random input (thresh=0.3): {len(dets_thresh)}")