{
  "n_embed": 64,
  "n_head": 4,
  "n_layer": 4,
  "dropout": 0.1,
  "vocab_size": 22,
  "block_size": 14,
  "architectures": ["TinyLLM"],
  "model_type": "tiny-causal-llm",
  "_from_model_config": true
}