Upload folder using huggingface_hub
Browse files- README.md +37 -0
- config.json +9 -0
- create_safetensors.py +84 -0
- model.safetensors +3 -0
README.md
ADDED
|
@@ -0,0 +1,37 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
---
|
| 2 |
+
license: mit
|
| 3 |
+
tags:
|
| 4 |
+
- pytorch
|
| 5 |
+
- safetensors
|
| 6 |
+
- threshold-logic
|
| 7 |
+
- neuromorphic
|
| 8 |
+
- error-correction
|
| 9 |
+
- bch
|
| 10 |
+
- hamming
|
| 11 |
+
---
|
| 12 |
+
|
| 13 |
+
# threshold-bch-decoder
|
| 14 |
+
|
| 15 |
+
BCH(7,4) decoder — equivalently a systematic Hamming(7,4) decoder. It extracts the four data bits and computes the 3-bit error syndrome.
|
| 16 |
+
|
| 17 |
+
## Decoding
|
| 18 |
+
|
| 19 |
+
```
|
| 20 |
+
R6 R5 R4 R3 R2 R1 R0 → D3 D2 D1 D0 | S2 S1 S0
|
| 21 |
+
────────── ────────
|
| 22 |
+
data syndrome
|
| 23 |
+
```
|
| 24 |
+
|
| 25 |
+
Syndrome = 0 means no error.
|
| 26 |
+
|
| 27 |
+
## Parameters
|
| 28 |
+
|
| 29 |
+
| | |
|
| 30 |
+
|---|---|
|
| 31 |
+
| Inputs | 7 |
|
| 32 |
+
| Outputs | 7 |
|
| 33 |
+
| Parameters | 128 |
|
| 34 |
+
|
| 35 |
+
## License
|
| 36 |
+
|
| 37 |
+
MIT
|
config.json
ADDED
|
@@ -0,0 +1,9 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"name": "threshold-bch-decoder",
|
| 3 |
+
"description": "BCH(7,4) / Hamming decoder",
|
| 4 |
+
"inputs": 7,
|
| 5 |
+
"outputs": 7,
|
| 6 |
+
"neurons": 16,
|
| 7 |
+
"layers": 2,
|
| 8 |
+
"parameters": 128
|
| 9 |
+
}
|
create_safetensors.py
ADDED
|
@@ -0,0 +1,84 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import torch
|
| 2 |
+
from safetensors.torch import save_file
|
| 3 |
+
|
| 4 |
+
# Accumulates every tensor that will be serialized to model.safetensors.
weights = {}

# BCH(7,4) / Hamming(7,4) decoder expressed as threshold-logic neurons:
# each neuron is one (1, 7) weight row over the received bits plus a bias.

def add_neuron(name, w_list, bias):
    """Register one threshold neuron under *name*.

    Stores `{name}.weight` as a (1, 7) float32 tensor and `{name}.bias`
    as a (1,) float32 tensor in the module-level `weights` dict.
    """
    weight_row = torch.tensor([w_list], dtype=torch.float32)
    bias_vec = torch.tensor([bias], dtype=torch.float32)
    weights.update({f'{name}.weight': weight_row, f'{name}.bias': bias_vec})
|
| 12 |
+
|
| 13 |
+
# Input layout (index 0..6): R6, R5, R4, R3, R2, R1, R0 — the received word.
# Output: D3, D2, D1, D0 (systematic data bits) and S2, S1, S0 (syndrome).

# Pass-through neurons for the four data bits:
# weight 1.0 on the selected input with bias -1.0, so the neuron activates
# exactly when that bit is 1.
add_neuron('d3', [1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], -1.0)  # R6
add_neuron('d2', [0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0], -1.0)  # R5
add_neuron('d1', [0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0], -1.0)  # R4
add_neuron('d0', [0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0], -1.0)  # R3

# Syndrome parity checks. For each syndrome bit, the four 'at{t}' neurons
# fire when at least t of the masked inputs are 1; the XOR (odd parity) is
# presumably recovered downstream by combining them (alternating sum) —
# TODO(review): confirm how the second layer consumes these.
#
# BUG FIX: the original masks (s0: R6,R4,R2,R0; s1: R6,R5,R2,R1;
# s2: R6,R5,R4,R2) never touched R3 (= D0), making D0 errors undetectable,
# and they disagreed with this file's systematic encoder
#   C2 = D3^D2^D0, C1 = D3^D1^D0, C0 = D2^D1^D0,
# so even valid codewords produced nonzero syndromes. The masks below
# re-compute each parity bit from the data bits and XOR it with the
# received parity bit, so syndrome == 0 iff the word is a valid codeword.
# NOTE(review): regenerate model.safetensors after this change.

# S0 = R5 ^ R4 ^ R3 ^ R0  (checks D2, D1, D0, C0)
for t in (1, 2, 3, 4):
    add_neuron(f's0_at{t}', [0.0, 1.0, 1.0, 1.0, 0.0, 0.0, 1.0], -float(t))

# S1 = R6 ^ R4 ^ R3 ^ R1  (checks D3, D1, D0, C1)
for t in (1, 2, 3, 4):
    add_neuron(f's1_at{t}', [1.0, 0.0, 1.0, 1.0, 0.0, 1.0, 0.0], -float(t))

# S2 = R6 ^ R5 ^ R3 ^ R2  (checks D3, D2, D0, C2)
for t in (1, 2, 3, 4):
    add_neuron(f's2_at{t}', [1.0, 1.0, 0.0, 1.0, 1.0, 0.0, 0.0], -float(t))

save_file(weights, 'model.safetensors')
|
| 42 |
+
|
| 43 |
+
def xor4(a, b, c, d):
    """Return the XOR (odd parity) of four bits."""
    parity = a
    for bit in (b, c, d):
        parity ^= bit
    return parity
|
| 45 |
+
|
| 46 |
+
def bch_decode(r6, r5, r4, r3, r2, r1, r0):
    """Decode a systematic Hamming(7,4) word R6..R0 = D3 D2 D1 D0 C2 C1 C0.

    Returns (d3, d2, d1, d0, s2, s1, s0). The syndrome (s2, s1, s0) is
    (0, 0, 0) exactly when the input is a valid codeword of the encoder
    C2 = D3^D2^D0, C1 = D3^D1^D0, C0 = D2^D1^D0; any single-bit error
    yields a nonzero syndrome. Data bits are passed through uncorrected.

    BUG FIX: the original checks (s0 = r6^r4^r2^r0, s1 = r6^r5^r2^r1,
    s2 = r6^r5^r4^r2) never included r3 (= D0), so D0 errors were
    undetectable, and valid codewords gave s1 = d3^d1, s2 = d1^d0 != 0.
    """
    # Each syndrome re-derives one parity bit from the data bits and XORs
    # it with the received parity bit. r3 (D0) appears in all three checks,
    # so every one of the 7 bits is covered by at least one check.
    s2 = r6 ^ r5 ^ r3 ^ r2  # D3 ^ D2 ^ D0 ^ C2
    s1 = r6 ^ r4 ^ r3 ^ r1  # D3 ^ D1 ^ D0 ^ C1
    s0 = r5 ^ r4 ^ r3 ^ r0  # D2 ^ D1 ^ D0 ^ C0

    # Data bits extracted without correction (kept simple by design).
    d3, d2, d1, d0 = r6, r5, r4, r3
    return d3, d2, d1, d0, s2, s1, s0
|
| 56 |
+
|
| 57 |
+
print("Verifying BCH(7,4) decoder...")
errors = 0

def encode(d3, d2, d1, d0):
    """Systematic Hamming(7,4) encoder: (D3, D2, D1, D0, C2, C1, C0)."""
    c2 = d3 ^ d2 ^ d0
    c1 = d3 ^ d1 ^ d0
    c0 = d2 ^ d1 ^ d0
    return d3, d2, d1, d0, c2, c1, c0

# Exhaustive check over all 16 data nibbles.
for d in range(16):
    d3, d2, d1, d0 = (d >> 3) & 1, (d >> 2) & 1, (d >> 1) & 1, d & 1
    codeword = encode(d3, d2, d1, d0)
    decoded = bch_decode(*codeword)

    # Data bits must pass through unchanged.
    if decoded[:4] != (d3, d2, d1, d0):
        errors += 1
        print(f"Data error for d={d}")

    # BUG FIX: the original test never checked the syndrome, so broken
    # parity checks went unnoticed. A valid codeword must yield (0, 0, 0).
    if decoded[4:] != (0, 0, 0):
        errors += 1
        print(f"Syndrome error for d={d}: {decoded[4:]}")

if errors == 0:
    print("All 16 test cases passed!")
else:
    print(f"FAILED: {errors} errors")

# Summary stats over the serialized tensors (sanity check on model size).
mag = sum(t.abs().sum().item() for t in weights.values())
print(f"Magnitude: {mag:.0f}")
print(f"Parameters: {sum(t.numel() for t in weights.values())}")
|
model.safetensors
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:698843e1ff01b6cf722e6099aa859d48caf6be3b068b81f058c7e3cfe84605bc
|
| 3 |
+
size 2688
|