{
  "model_name": "lighttiny-4m",
  "version": 1,
  "total_tensors": 7,
  "total_size_bytes": 44406688,
  "total_size_mb": 42.349517822265625,
  "output_dir": "exports/lighttiny-4m-trained",
  "model_config": {
    "model_type": "lighttiny",
    "architectures": [
      "LightTinyForCausalLM"
    ],
    "name": "lighttiny-4m",
    "vocab_size": 8000,
    "hidden_dim": 128,
    "num_regions": 32,
    "field_size": 256,
    "field_depth": 488,
    "total_configs": 3997696,
    "total_params": 7102464,
    "layer3_resonance": {
      "max_active_ratio": 0.05,
      "activation_threshold": 0.08,
      "avg_active_ratio": 0.0,
      "total_activations": 0
    },
    "layer4_dynamics": {
      "coupling_strength": 0.1,
      "damping": 0.01,
      "dt": 0.01,
      "max_evolution_steps": 40,
      "convergence_threshold": 0.008,
      "min_coherence": 0.2
    },
    "bos_token_id": 2,
    "eos_token_id": 3,
    "pad_token_id": 0
  }
}