{
  "model_type": "tinystories",
  "architectures": ["WikiMiniModel"],
  "vocab_size": 10000,
  "d_model": 448,
  "n_layers": 7,
  "n_heads": 7,
  "d_ffn": 1344,
  "max_seq_len": 512,
  "max_position_embeddings": 512,
  "dropout": 0.0,
  "rope_percentage": 0.5,
  "rope_base": 10000,
  "rms_norm_eps": 1e-6,
  "tie_embeddings": true,
  "use_flash_attention": false,
  "torch_dtype": "bfloat16",
  "transformers_version": "4.30.0"
}