phanerozoic committed on
Commit
cf789e5
·
verified ·
1 Parent(s): 6716fa5

Upload folder using huggingface_hub

Browse files
Files changed (4) hide show
  1. README.md +72 -72
  2. config.json +9 -9
  3. create_safetensors.py +61 -61
  4. model.py +26 -26
README.md CHANGED
@@ -1,72 +1,72 @@
1
- ---
2
- license: mit
3
- tags:
4
- - pytorch
5
- - safetensors
6
- - threshold-logic
7
- - neuromorphic
8
- ---
9
-
10
- # threshold-parity8
11
-
12
- 8-bit parity function. Outputs 1 if odd number of inputs are high.
13
-
14
- ## Function
15
-
16
- parity8(b0..b7) = b0 XOR b1 XOR b2 XOR b3 XOR b4 XOR b5 XOR b6 XOR b7
17
-
18
- ## Architecture
19
-
20
- Balanced tree of 7 XOR2 gates:
21
-
22
- ```
23
- b0 b1 b2 b3 b4 b5 b6 b7
24
- \/ \/ \/ \/
25
- xor01 xor23 xor45 xor67 (Level 1: 4 gates)
26
- \ / \ /
27
- xor0123 xor4567 (Level 2: 2 gates)
28
- \ /
29
- xor_final (Level 3: 1 gate)
30
- ```
31
-
32
- Each XOR2 uses OR-NAND-AND structure (3 neurons, 9 params, magnitude 10).
33
-
34
- ## Parameters
35
-
36
- | | |
37
- |---|---|
38
- | Inputs | 8 |
39
- | Outputs | 1 |
40
- | Neurons | 21 |
41
- | Layers | 6 |
42
- | Parameters | 63 |
43
- | Magnitude | 70 |
44
-
45
- ## Usage
46
-
47
- ```python
48
- from safetensors.torch import load_file
49
-
50
- w = load_file('model.safetensors')
51
-
52
- def xor2(a, b, prefix):
53
- or_out = int(a * w[f'{prefix}.or.weight'][0] + b * w[f'{prefix}.or.weight'][1] + w[f'{prefix}.or.bias'] >= 0)
54
- nand_out = int(a * w[f'{prefix}.nand.weight'][0] + b * w[f'{prefix}.nand.weight'][1] + w[f'{prefix}.nand.bias'] >= 0)
55
- return int(or_out * w[f'{prefix}.and.weight'][0] + nand_out * w[f'{prefix}.and.weight'][1] + w[f'{prefix}.and.bias'] >= 0)
56
-
57
- def parity8(bits):
58
- x01 = xor2(bits[0], bits[1], 'xor_01')
59
- x23 = xor2(bits[2], bits[3], 'xor_23')
60
- x45 = xor2(bits[4], bits[5], 'xor_45')
61
- x67 = xor2(bits[6], bits[7], 'xor_67')
62
- x0123 = xor2(x01, x23, 'xor_0123')
63
- x4567 = xor2(x45, x67, 'xor_4567')
64
- return xor2(x0123, x4567, 'xor_final')
65
-
66
- print(parity8([1, 0, 1, 0, 1, 0, 1, 0])) # 0 (even)
67
- print(parity8([1, 1, 1, 0, 0, 0, 0, 0])) # 1 (odd)
68
- ```
69
-
70
- ## License
71
-
72
- MIT
 
1
+ ---
2
+ license: mit
3
+ tags:
4
+ - pytorch
5
+ - safetensors
6
+ - threshold-logic
7
+ - neuromorphic
8
+ ---
9
+
10
+ # threshold-parity8
11
+
12
+ 8-bit parity function. Outputs 1 if an odd number of inputs is high.
13
+
14
+ ## Function
15
+
16
+ parity8(b0..b7) = b0 XOR b1 XOR b2 XOR b3 XOR b4 XOR b5 XOR b6 XOR b7
17
+
18
+ ## Architecture
19
+
20
+ Balanced tree of 7 XOR2 gates:
21
+
22
+ ```
23
+ b0 b1 b2 b3 b4 b5 b6 b7
24
+ \/ \/ \/ \/
25
+ xor01 xor23 xor45 xor67 (Level 1: 4 gates)
26
+ \ / \ /
27
+ xor0123 xor4567 (Level 2: 2 gates)
28
+ \ /
29
+ xor_final (Level 3: 1 gate)
30
+ ```
31
+
32
+ Each XOR2 uses OR-NAND-AND structure (3 neurons, 9 params, magnitude 10).
33
+
34
+ ## Parameters
35
+
36
+ | | |
37
+ |---|---|
38
+ | Inputs | 8 |
39
+ | Outputs | 1 |
40
+ | Neurons | 21 |
41
+ | Layers | 6 |
42
+ | Parameters | 63 |
43
+ | Magnitude | 70 |
44
+
45
+ ## Usage
46
+
47
+ ```python
48
+ from safetensors.torch import load_file
49
+
50
+ w = load_file('model.safetensors')
51
+
52
+ def xor2(a, b, prefix):
53
+ or_out = int(a * w[f'{prefix}.or.weight'][0] + b * w[f'{prefix}.or.weight'][1] + w[f'{prefix}.or.bias'] >= 0)
54
+ nand_out = int(a * w[f'{prefix}.nand.weight'][0] + b * w[f'{prefix}.nand.weight'][1] + w[f'{prefix}.nand.bias'] >= 0)
55
+ return int(or_out * w[f'{prefix}.and.weight'][0] + nand_out * w[f'{prefix}.and.weight'][1] + w[f'{prefix}.and.bias'] >= 0)
56
+
57
+ def parity8(bits):
58
+ x01 = xor2(bits[0], bits[1], 'xor_01')
59
+ x23 = xor2(bits[2], bits[3], 'xor_23')
60
+ x45 = xor2(bits[4], bits[5], 'xor_45')
61
+ x67 = xor2(bits[6], bits[7], 'xor_67')
62
+ x0123 = xor2(x01, x23, 'xor_0123')
63
+ x4567 = xor2(x45, x67, 'xor_4567')
64
+ return xor2(x0123, x4567, 'xor_final')
65
+
66
+ print(parity8([1, 0, 1, 0, 1, 0, 1, 0])) # 0 (even)
67
+ print(parity8([1, 1, 1, 0, 0, 0, 0, 0])) # 1 (odd)
68
+ ```
69
+
70
+ ## License
71
+
72
+ MIT
config.json CHANGED
@@ -1,9 +1,9 @@
1
- {
2
- "name": "threshold-parity8",
3
- "description": "8-bit parity (XOR of 8 inputs)",
4
- "inputs": 8,
5
- "outputs": 1,
6
- "neurons": 21,
7
- "layers": 6,
8
- "parameters": 63
9
- }
 
1
+ {
2
+ "name": "threshold-parity8",
3
+ "description": "8-bit parity (XOR of 8 inputs)",
4
+ "inputs": 8,
5
+ "outputs": 1,
6
+ "neurons": 21,
7
+ "layers": 6,
8
+ "parameters": 63
9
+ }
create_safetensors.py CHANGED
@@ -1,61 +1,61 @@
1
- import torch
2
- from safetensors.torch import save_file
3
-
4
- # Balanced tree structure for 8-bit parity
5
- # Level 1: XOR(a,b), XOR(c,d), XOR(e,f), XOR(g,h)
6
- # Level 2: XOR(ab, cd), XOR(ef, gh)
7
- # Level 3: XOR(abcd, efgh)
8
-
9
- def xor_block(prefix):
10
- return {
11
- f'{prefix}.or.weight': torch.tensor([1.0, 1.0], dtype=torch.float32),
12
- f'{prefix}.or.bias': torch.tensor([-1.0], dtype=torch.float32),
13
- f'{prefix}.nand.weight': torch.tensor([-1.0, -1.0], dtype=torch.float32),
14
- f'{prefix}.nand.bias': torch.tensor([1.0], dtype=torch.float32),
15
- f'{prefix}.and.weight': torch.tensor([1.0, 1.0], dtype=torch.float32),
16
- f'{prefix}.and.bias': torch.tensor([-2.0], dtype=torch.float32),
17
- }
18
-
19
- weights = {}
20
- # Level 1
21
- weights.update(xor_block('xor_01'))
22
- weights.update(xor_block('xor_23'))
23
- weights.update(xor_block('xor_45'))
24
- weights.update(xor_block('xor_67'))
25
- # Level 2
26
- weights.update(xor_block('xor_0123'))
27
- weights.update(xor_block('xor_4567'))
28
- # Level 3
29
- weights.update(xor_block('xor_final'))
30
-
31
- save_file(weights, 'model.safetensors')
32
-
33
- def xor2(a, b, prefix):
34
- or_out = int(a * weights[f'{prefix}.or.weight'][0] + b * weights[f'{prefix}.or.weight'][1] + weights[f'{prefix}.or.bias'] >= 0)
35
- nand_out = int(a * weights[f'{prefix}.nand.weight'][0] + b * weights[f'{prefix}.nand.weight'][1] + weights[f'{prefix}.nand.bias'] >= 0)
36
- return int(or_out * weights[f'{prefix}.and.weight'][0] + nand_out * weights[f'{prefix}.and.weight'][1] + weights[f'{prefix}.and.bias'] >= 0)
37
-
38
- def parity8(bits):
39
- # Level 1
40
- x01 = xor2(bits[0], bits[1], 'xor_01')
41
- x23 = xor2(bits[2], bits[3], 'xor_23')
42
- x45 = xor2(bits[4], bits[5], 'xor_45')
43
- x67 = xor2(bits[6], bits[7], 'xor_67')
44
- # Level 2
45
- x0123 = xor2(x01, x23, 'xor_0123')
46
- x4567 = xor2(x45, x67, 'xor_4567')
47
- # Level 3
48
- return xor2(x0123, x4567, 'xor_final')
49
-
50
- print("Verifying parity8...")
51
- errors = 0
52
- for i in range(256):
53
- bits = [(i >> j) & 1 for j in range(8)]
54
- result = parity8(bits)
55
- expected = sum(bits) % 2
56
- if result != expected:
57
- errors += 1
58
- print(f"ERROR: parity({bits}) = {result}, expected {expected}")
59
- if errors == 0:
60
- print("All 256 test cases passed!")
61
- print(f"Magnitude: {sum(t.abs().sum().item() for t in weights.values()):.0f}")
 
1
+ import torch
2
+ from safetensors.torch import save_file
3
+
4
+ # Balanced tree structure for 8-bit parity
5
+ # Level 1: XOR(a,b), XOR(c,d), XOR(e,f), XOR(g,h)
6
+ # Level 2: XOR(ab, cd), XOR(ef, gh)
7
+ # Level 3: XOR(abcd, efgh)
8
+
9
def xor_block(prefix):
    """Return the 9 parameters of one XOR2 threshold gate.

    The gate is built from three threshold neurons (OR, NAND, AND);
    each contributes a 2-element weight vector and a 1-element bias,
    keyed as '{prefix}.{gate}.weight' / '{prefix}.{gate}.bias'.
    """
    params = {}
    # (gate name, weight pair, bias) for each of the three neurons.
    for gate, pair, bias in (
        ('or', (1.0, 1.0), -1.0),
        ('nand', (-1.0, -1.0), 1.0),
        ('and', (1.0, 1.0), -2.0),
    ):
        params[f'{prefix}.{gate}.weight'] = torch.tensor(list(pair), dtype=torch.float32)
        params[f'{prefix}.{gate}.bias'] = torch.tensor([bias], dtype=torch.float32)
    return params
18
+
19
# Assemble the 7-gate balanced XOR tree and write it to disk.
weights = {}
for gate_prefix in (
    'xor_01', 'xor_23', 'xor_45', 'xor_67',  # Level 1: adjacent input pairs
    'xor_0123', 'xor_4567',                  # Level 2: pair the level-1 outputs
    'xor_final',                             # Level 3: root of the tree
):
    weights.update(xor_block(gate_prefix))

save_file(weights, 'model.safetensors')
32
+
33
def xor2(a, b, prefix):
    """Evaluate one XOR2 gate on bits a, b using the module-level `weights`.

    XOR is realized as AND(OR(a, b), NAND(a, b)); each neuron fires (1)
    when its weighted sum plus bias is >= 0.
    """
    def neuron(gate, x, y):
        # One threshold neuron of this gate, looked up by name.
        wt = weights[f'{prefix}.{gate}.weight']
        return int(x * wt[0] + y * wt[1] + weights[f'{prefix}.{gate}.bias'] >= 0)

    or_out = neuron('or', a, b)
    nand_out = neuron('nand', a, b)
    return neuron('and', or_out, nand_out)
37
+
38
def parity8(bits):
    """Fold 8 bits through the balanced XOR tree; return their parity (0/1)."""
    # Level 1: XOR adjacent input pairs (gates xor_01, xor_23, xor_45, xor_67).
    level1 = [xor2(bits[i], bits[i + 1], f'xor_{i}{i + 1}') for i in (0, 2, 4, 6)]
    # Level 2: combine the two halves.
    upper = xor2(level1[0], level1[1], 'xor_0123')
    lower = xor2(level1[2], level1[3], 'xor_4567')
    # Level 3: root gate yields the overall parity.
    return xor2(upper, lower, 'xor_final')
49
+
50
# Exhaustive check: the 8-bit parity network must match sum(bits) mod 2
# for every one of the 256 possible inputs.
print("Verifying parity8...")
errors = 0
for value in range(256):
    bits = [(value >> pos) & 1 for pos in range(8)]
    result = parity8(bits)
    expected = sum(bits) & 1
    if result != expected:
        errors += 1
        print(f"ERROR: parity({bits}) = {result}, expected {expected}")
if errors == 0:
    print("All 256 test cases passed!")
# Total L1 magnitude of all parameters (7 gates x 10 = 70).
print(f"Magnitude: {sum(t.abs().sum().item() for t in weights.values()):.0f}")
model.py CHANGED
@@ -1,26 +1,26 @@
1
- import torch
2
- from safetensors.torch import load_file
3
-
4
- def load_model(path='model.safetensors'):
5
- return load_file(path)
6
-
7
- def xor2(a, b, prefix, w):
8
- or_out = int(a * w[f'{prefix}.or.weight'][0] + b * w[f'{prefix}.or.weight'][1] + w[f'{prefix}.or.bias'] >= 0)
9
- nand_out = int(a * w[f'{prefix}.nand.weight'][0] + b * w[f'{prefix}.nand.weight'][1] + w[f'{prefix}.nand.bias'] >= 0)
10
- return int(or_out * w[f'{prefix}.and.weight'][0] + nand_out * w[f'{prefix}.and.weight'][1] + w[f'{prefix}.and.bias'] >= 0)
11
-
12
- def parity8(bits, weights):
13
- x01 = xor2(bits[0], bits[1], 'xor_01', weights)
14
- x23 = xor2(bits[2], bits[3], 'xor_23', weights)
15
- x45 = xor2(bits[4], bits[5], 'xor_45', weights)
16
- x67 = xor2(bits[6], bits[7], 'xor_67', weights)
17
- x0123 = xor2(x01, x23, 'xor_0123', weights)
18
- x4567 = xor2(x45, x67, 'xor_4567', weights)
19
- return xor2(x0123, x4567, 'xor_final', weights)
20
-
21
- if __name__ == '__main__':
22
- w = load_model()
23
- print('parity8 selected outputs:')
24
- for n_ones in range(9):
25
- bits = [1 if j < n_ones else 0 for j in range(8)]
26
- print(f' {n_ones} ones: {parity8(bits, w)}')
 
1
+ import torch
2
+ from safetensors.torch import load_file
3
+
4
def load_model(path='model.safetensors'):
    """Load the threshold-network weights from a safetensors file.

    Returns a dict mapping parameter names to tensors.
    """
    tensors = load_file(path)
    return tensors
6
+
7
def xor2(a, b, prefix, w):
    """Threshold-logic XOR of bits a and b.

    Uses the three neurons (OR, NAND, AND) stored in weight dict `w`
    under keys '{prefix}.{gate}.weight' / '{prefix}.{gate}.bias';
    XOR = AND(OR(a, b), NAND(a, b)). A neuron fires when its weighted
    sum plus bias is >= 0.
    """
    def fire(gate, x, y):
        wt = w[f'{prefix}.{gate}.weight']
        return int(x * wt[0] + y * wt[1] + w[f'{prefix}.{gate}.bias'] >= 0)

    return fire('and', fire('or', a, b), fire('nand', a, b))
11
+
12
def parity8(bits, weights):
    """8-bit parity via a balanced tree of seven XOR2 threshold gates."""
    # Level 1: XOR adjacent input pairs (xor_01, xor_23, xor_45, xor_67).
    pairs = [xor2(bits[i], bits[i + 1], f'xor_{i}{i + 1}', weights) for i in (0, 2, 4, 6)]
    # Level 2: combine each half of the tree.
    upper = xor2(pairs[0], pairs[1], 'xor_0123', weights)
    lower = xor2(pairs[2], pairs[3], 'xor_4567', weights)
    # Level 3: root gate produces the final parity bit.
    return xor2(upper, lower, 'xor_final', weights)
20
+
21
if __name__ == '__main__':
    # Demo: print the parity for inputs with 0..8 leading ones.
    # Expected output alternates 0, 1, 0, 1, ... as the count of ones grows.
    w = load_model()
    print('parity8 selected outputs:')
    for n_ones in range(9):
        bits = [1] * n_ones + [0] * (8 - n_ones)
        print(f' {n_ones} ones: {parity8(bits, w)}')