{
  "version": "2.0",
  "state_type": "cognitive",
  "components": {
    "neurogenesis": {
      "n_neurons": 32,
      "births": 0,
      "deaths": 32
    },
    "energy": {
      "energy": 0.05004558339715004,
      "consumed": 1214.7626953125
    },
    "dream": {
      "total_dreams": 1,
      "buffer_size": 256,
      "is_dreaming": true
    },
    "earcp_layers": [
      {
        "layer_idx": 0,
        "expert_count": 12,
        "low_coh_count": 59250
      },
      {
        "layer_idx": 1,
        "expert_count": 12,
        "low_coh_count": 60225
      },
      {
        "layer_idx": 2,
        "expert_count": 12,
        "low_coh_count": 61291
      },
      {
        "layer_idx": 3,
        "expert_count": 12,
        "low_coh_count": 61188
      },
      {
        "layer_idx": 4,
        "expert_count": 12,
        "low_coh_count": 82170
      },
      {
        "layer_idx": 5,
        "expert_count": 12,
        "low_coh_count": 58855
      },
      {
        "layer_idx": 6,
        "expert_count": 12,
        "low_coh_count": 56207
      },
      {
        "layer_idx": 7,
        "expert_count": 12,
        "low_coh_count": 78734
      }
    ],
    "self_trace": {
      "identity_norm": 0.11626036465167999,
      "n_traces": 84975
    },
    "memory": {
      "st_norm": 5.097438812255859,
      "lt_norm": 5.5504255294799805
    }
  },
  "training_summary": {
    "epochs_completed": 6,
    "final_loss": 0.01724659317859717,
    "total_neurogenesis": 0
  }
}