File size: 1,810 Bytes
b827152
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
import torch
from safetensors.torch import load_file

def load_model(path='model.safetensors'):
    """Load a tensor dict from the safetensors file at *path*."""
    weights = load_file(path)
    return weights

def _neuron(inputs, name, weights):
    """Evaluate one linear-threshold neuron: int(inputs @ W.T + b >= 0).

    Looks up the weight/bias pair stored under ``'<name>.weight'`` /
    ``'<name>.bias'`` in *weights* and returns 0 or 1.
    """
    vec = torch.tensor([float(v) for v in inputs])
    pre = vec @ weights[f'{name}.weight'].T + weights[f'{name}.bias']
    return int((pre >= 0).item())


def ctz4(x3, x2, x1, x0, weights):
    """4-bit count trailing zeros: returns number of trailing 0 bits (0-4).

    Args:
        x3, x2, x1, x0: Input bits, most significant first (each 0 or 1).
        weights: Mapping of ``'<neuron>.weight'`` / ``'<neuron>.bias'``
            tensors, e.g. the dict returned by ``load_model``.

    Returns:
        Tuple ``(y2, y1, y0)`` — binary encoding of the count, so the
        result is ``y2*4 + y1*2 + y0``.
    """
    bits = (x3, x2, x1, x0)

    # Layer 1: prefix conditions (lower bits all zero) and the all-zero flag.
    p0 = _neuron(bits, 'p0', weights)
    p01 = _neuron(bits, 'p01', weights)
    p012 = _neuron(bits, 'p012', weights)
    all_zero = _neuron(bits, 'all_zero', weights)

    # Layer 2: one-hot position of the lowest set bit (counts 1-3).
    z1 = _neuron((p0, x1), 'z1', weights)
    z2 = _neuron((p01, x2), 'z2', weights)
    z3 = _neuron((p012, x3), 'z3', weights)

    # Layer 3: binary encoding of the count (y2 fires only for count 4).
    y2 = _neuron((all_zero,), 'y2', weights)
    y1 = _neuron((z2, z3), 'y1', weights)
    y0 = _neuron((z1, z3), 'y0', weights)

    return y2, y1, y0

if __name__ == '__main__':
    weights = load_model()
    print('CTZ4 examples:')
    # Demonstrate the network on a handful of 4-bit patterns.
    for value in (0b0001, 0b0010, 0b0100, 0b1000, 0b0000, 0b0110, 0b1100):
        bits = [(value >> shift) & 1 for shift in (3, 2, 1, 0)]
        y2, y1, y0 = ctz4(*bits, weights)
        print(f'  {value:04b} -> {4 * y2 + 2 * y1 + y0}')