# 4-to-16 decoder driven by weights stored in model.safetensors
import torch
from safetensors.torch import load_file
def load_model(path='model.safetensors'):
    """Load the decoder weights from a safetensors checkpoint.

    Returns the dict of tensors produced by ``safetensors.torch.load_file``;
    keys are expected to look like ``y{i}.weight`` / ``y{i}.bias``.
    """
    weights = load_file(path)
    return weights
def decode_4to16(a3, a2, a1, a0, weights):
    """4-to-16 decoder: converts 4-bit binary to one-hot 16-bit output."""
    # Pack the address bits (MSB first) into a float tensor so they can be
    # multiplied elementwise against each output line's weight vector.
    address = torch.tensor([float(bit) for bit in (a3, a2, a1, a0)])
    # Each output line fires (1) when its weighted sum plus bias crosses 0.
    return [
        int(torch.sum(address * weights[f'y{idx}.weight']) + weights[f'y{idx}.bias'] >= 0)
        for idx in range(16)
    ]
if __name__ == '__main__':
    weights = load_model()
    print('4-to-16 Decoder')
    for value in range(16):
        # MSB-first binary expansion of the 4-bit address.
        bits = format(value, '04b')
        a3, a2, a1, a0 = (int(c) for c in bits)
        one_hot = decode_4to16(a3, a2, a1, a0, weights)
        print(f" {value:2d} ({a3}{a2}{a1}{a0}) -> {''.join(map(str, one_hot))}")