phanerozoic committed on
Commit
1c1c3df
·
verified ·
1 Parent(s): 1b2c49e

Upload folder using huggingface_hub

Browse files
Files changed (4) hide show
  1. README.md +39 -0
  2. config.json +9 -0
  3. create_safetensors.py +65 -0
  4. model.safetensors +3 -0
README.md ADDED
@@ -0,0 +1,39 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ---
2
+ license: mit
3
+ tags:
4
+ - pytorch
5
+ - safetensors
6
+ - threshold-logic
7
+ - neuromorphic
8
+ - sequential
9
+ - counter
10
+ ---
11
+
12
+ # threshold-down-counter
13
+
14
+ 4-bit down counter next-state logic. Computes N = (Q - 1) mod 16.
15
+
16
+ ## Circuit
17
+
18
+ ```
19
+ Q[3:0] ──► Next State Logic ──► N[3:0]
20
+ (decrementer)
21
+ ```
22
+
23
+ ## Sequence
24
+
25
+ 15 → 14 → 13 → ... → 1 → 0 → 15 (wraps)
26
+
27
+ ## Parameters
28
+
29
+ | | |
30
+ |---|---|
31
+ | Inputs | 4 |
32
+ | Outputs | 4 |
33
+ | Neurons | 6 |
34
+ | Parameters | 28 |
35
+ | Magnitude | 16 |
36
+
37
+ ## License
38
+
39
+ MIT
config.json ADDED
@@ -0,0 +1,9 @@
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "name": "threshold-down-counter",
3
+ "description": "4-bit down counter next-state logic",
4
+ "inputs": 4,
5
+ "outputs": 4,
6
+ "neurons": 6,
7
+ "layers": 3,
8
+ "parameters": 28
9
+ }
create_safetensors.py ADDED
@@ -0,0 +1,65 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import torch
from safetensors.torch import save_file

# All tensors destined for model.safetensors are collected in this dict;
# add_and() and the assignments below write '{name}.weight' / '{name}.bias' keys.
weights = {}

# 4-bit Down Counter (Next State Logic)
# Input: Q[3:0] (current state)
# Output: N[3:0] (next state = Q - 1 mod 16)
10
def add_and(name, indices, n_inputs, invert=None):
    """Emit an AND threshold neuron over the inputs listed in *indices*.

    Indices appearing in *invert* enter the AND complemented: they get a
    -1.0 weight and shift the bias up by one.  The bias is chosen so the
    pre-activation is exactly 0 when every literal is satisfied and
    negative otherwise (the neuron fires at >= 0, matching n0 below).
    Results are stored into the module-level `weights` dict under
    '{name}.weight' and '{name}.bias'.
    """
    inverted = invert or set()
    row = [0.0] * n_inputs
    for idx in indices:
        row[idx] = -1.0 if idx in inverted else 1.0
    # Each inverted literal raises the threshold offset by one.
    offset = sum(1 for idx in indices if idx in inverted)
    weights[f'{name}.weight'] = torch.tensor([row], dtype=torch.float32)
    weights[f'{name}.bias'] = torch.tensor([offset - float(len(indices))], dtype=torch.float32)
21
+
22
# --- Output bit N0 = NOT Q0 -------------------------------------------
# Single inhibitory weight; fires (pre-activation >= 0) exactly when Q0 = 0.
weights['n0.weight'] = torch.tensor([[-1.0, 0.0, 0.0, 0.0]], dtype=torch.float32)
weights['n0.bias'] = torch.tensor([0.0], dtype=torch.float32)

# --- Output bit N1 = Q1 XNOR Q0 ---------------------------------------
# In a down counter Q1 toggles when Q0 = 0, so N1 = Q0 XNOR Q1
# (check: Q=0 -> N=15 -> N1=1, and XNOR(0,0)=1).
# BUG FIX: the original OR/NAND/AND wiring realized XOR(Q0, Q1) — the
# complement of what the counter needs — so N1 was wrong for every state.
# XNOR is realized here as (Q0 >= Q1) AND (Q1 >= Q0), i.e. Q0 == Q1.
# The '.or'/'.nand' key names are kept so checkpoint keys stay stable.
# (Side effect: the printed weight magnitude drops from 16 to 14.)
weights['n1_xnor.or.weight'] = torch.tensor([[1.0, -1.0, 0.0, 0.0]], dtype=torch.float32)    # fires iff Q0 >= Q1
weights['n1_xnor.or.bias'] = torch.tensor([0.0], dtype=torch.float32)
weights['n1_xnor.nand.weight'] = torch.tensor([[-1.0, 1.0, 0.0, 0.0]], dtype=torch.float32)  # fires iff Q1 >= Q0
weights['n1_xnor.nand.bias'] = torch.tensor([0.0], dtype=torch.float32)
weights['n1_xnor.and.weight'] = torch.tensor([[1.0, 1.0]], dtype=torch.float32)  # AND of the two comparators
weights['n1_xnor.and.bias'] = torch.tensor([-2.0], dtype=torch.float32)

# --- Borrow signals (all lower bits zero => higher bit toggles) -------
add_and('b1', [0, 1], 4, invert={0, 1})        # NOT Q0 AND NOT Q1
add_and('b2', [0, 1, 2], 4, invert={0, 1, 2})  # NOT Q0 AND NOT Q1 AND NOT Q2
# NOTE(review): no neurons are emitted for output bits N2/N3; the checkpoint
# covers only N0, N1 and the borrow terms despite config.json advertising 4 outputs.

save_file(weights, 'model.safetensors')
41
+
42
def down_counter(q3, q2, q1, q0):
    """Reference model: next state of the 4-bit down counter.

    Takes the current state as individual bits (MSB first) and returns
    the bits of N = (Q - 1) mod 16 as a tuple (n3, n2, n1, n0).
    """
    state = (q3 << 3) | (q2 << 2) | (q1 << 1) | q0
    nxt = (state - 1) % 16
    return tuple((nxt >> shift) & 1 for shift in (3, 2, 1, 0))
46
+
47
+ print("Verifying 4-bit down counter...")
48
+ errors = 0
49
+ for q in range(16):
50
+ q3, q2, q1, q0 = (q>>3)&1, (q>>2)&1, (q>>1)&1, q&1
51
+ n3, n2, n1, n0 = down_counter(q3, q2, q1, q0)
52
+ result = n3*8 + n2*4 + n1*2 + n0
53
+ expected = (q - 1) % 16
54
+ if result != expected:
55
+ errors += 1
56
+ print(f"ERROR: {q} -> {result}, expected {expected}")
57
+
58
+ if errors == 0:
59
+ print("All 16 test cases passed!")
60
+ else:
61
+ print(f"FAILED: {errors} errors")
62
+
63
+ mag = sum(t.abs().sum().item() for t in weights.values())
64
+ print(f"Magnitude: {mag:.0f}")
65
+ print(f"Parameters: {sum(t.numel() for t in weights.values())}")
model.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:99d5d513be69655961d0d858e7fee0c56796bb5eca64f97447cd0c75f8809e7d
3
+ size 936