```json
{
  "model_type": "lighttiny",
  "architectures": [
    "LightTinyForCausalLM"
  ],
  "name": "lighttiny-4m",
  "vocab_size": 8000,
  "hidden_dim": 128,
  "num_regions": 32,
  "field_size": 256,
  "field_depth": 488,
  "total_configs": 3997696,
  "total_params": 7102464,
  "layer3_resonance": {
    "max_active_ratio": 0.05,
    "activation_threshold": 0.08,
    "avg_active_ratio": 0.0,
    "total_activations": 0
  },
  "layer4_dynamics": {
    "coupling_strength": 0.1,
    "damping": 0.01,
    "dt": 0.01,
    "max_evolution_steps": 40,
    "convergence_threshold": 0.008,
    "min_coherence": 0.2
  },
  "bos_token_id": 2,
  "eos_token_id": 3,
  "pad_token_id": 0
}
```
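As a quick sanity check, the sketch below loads the config with Python's standard `json` module and verifies one relationship that appears to hold in the values above: `total_configs` equals `num_regions * field_size * field_depth` (32 × 256 × 488 = 3,997,696). The file path and the derivation are assumptions for illustration, not something stated in the config itself.

```python
import json

# Assumption: the config above is saved locally as "config.json".
with open("config.json") as f:
    cfg = json.load(f)

# Derived check (assumed relationship, consistent with the stored values):
# num_regions * field_size * field_depth = 32 * 256 * 488 = 3,997,696.
derived_configs = cfg["num_regions"] * cfg["field_size"] * cfg["field_depth"]
assert derived_configs == cfg["total_configs"], (
    f"expected {cfg['total_configs']:,}, got {derived_configs:,}"
)

# The token embedding table alone accounts for
# vocab_size * hidden_dim = 8000 * 128 = 1,024,000 of the
# 7,102,464 total parameters reported in the config.
embedding_params = cfg["vocab_size"] * cfg["hidden_dim"]
print(f"total_configs check passed: {derived_configs:,}")
print(f"embedding parameters: {embedding_params:,} of {cfg['total_params']:,}")
```

Note that `model_type: lighttiny` is not a model type registered with the `transformers` library, so loading this config through `AutoConfig` would require the model's own custom code; the plain `json` approach above avoids that dependency.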